diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometExecBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometExecBenchmark.scala
index 3dd930f67..3ee37bd66 100644
--- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometExecBenchmark.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometExecBenchmark.scala
@@ -274,23 +274,23 @@ object CometExecBenchmark extends CometBenchmarkBase {
   }
 
   override def runCometBenchmark(mainArgs: Array[String]): Unit = {
-//    runBenchmarkWithTable("Subquery", 1024 * 1024 * 10) { v =>
-//      subqueryExecBenchmark(v)
-//    }
-//
-//    runBenchmarkWithTable("Expand", 1024 * 1024 * 10) { v =>
-//      expandExecBenchmark(v)
-//    }
-//
-//    runBenchmarkWithTable("Project + Filter", 1024 * 1024 * 10) { v =>
-//      for (fractionOfZeros <- List(0.0, 0.50, 0.95)) {
-//        numericFilterExecBenchmark(v, fractionOfZeros)
-//      }
-//    }
-//
-//    runBenchmarkWithTable("Sort", 1024 * 1024 * 10) { v =>
-//      sortExecBenchmark(v)
-//    }
+    runBenchmarkWithTable("Subquery", 1024 * 1024 * 10) { v =>
+      subqueryExecBenchmark(v)
+    }
+
+    runBenchmarkWithTable("Expand", 1024 * 1024 * 10) { v =>
+      expandExecBenchmark(v)
+    }
+
+    runBenchmarkWithTable("Project + Filter", 1024 * 1024 * 10) { v =>
+      for (fractionOfZeros <- List(0.0, 0.50, 0.95)) {
+        numericFilterExecBenchmark(v, fractionOfZeros)
+      }
+    }
+
+    runBenchmarkWithTable("Sort", 1024 * 1024 * 10) { v =>
+      sortExecBenchmark(v)
+    }
 
     runBenchmarkWithTable("BloomFilterAggregate", 1024 * 1024 * 10) { v =>
       for (card <- List(100, 1024, 1024 * 1024)) {