From 7fe20b136bb949a8e51f7f8d01ce548455ce0a9f Mon Sep 17 00:00:00 2001 From: KAZUYUKI TANIMURA Date: Thu, 16 May 2024 12:36:45 -0700 Subject: [PATCH] test: Fix explain with extended info comet test (#436) * test: Fix explain with extended info comet test * address review comments * address review comments --- .../comet/shims/ShimCometSparkSessionExtensions.scala | 5 +++++ .../test/scala/org/apache/comet/CometExpressionSuite.scala | 2 +- .../src/test/scala/org/apache/spark/sql/CometTestBase.scala | 6 ++---- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/spark/src/main/spark-3.x/org/apache/comet/shims/ShimCometSparkSessionExtensions.scala b/spark/src/main/spark-3.x/org/apache/comet/shims/ShimCometSparkSessionExtensions.scala index ffec1bd40..eb04c68ab 100644 --- a/spark/src/main/spark-3.x/org/apache/comet/shims/ShimCometSparkSessionExtensions.scala +++ b/spark/src/main/spark-3.x/org/apache/comet/shims/ShimCometSparkSessionExtensions.scala @@ -40,6 +40,11 @@ trait ShimCometSparkSessionExtensions { */ def getOffset(limit: LimitExec): Int = getOffsetOpt(limit).getOrElse(0) + /** + * TODO: delete after dropping Spark 3.x support and directly call + * SQLConf.EXTENDED_EXPLAIN_PROVIDERS.key + */ + protected val EXTENDED_EXPLAIN_PROVIDERS_KEY = "spark.sql.extendedExplainProviders" } object ShimCometSparkSessionExtensions { diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala index dbb46e078..f3fd50e9e 100644 --- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala +++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala @@ -1399,7 +1399,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper { CometConf.COMET_EXEC_ENABLED.key -> "true", CometConf.COMET_SHUFFLE_ENFORCE_MODE_ENABLED.key -> "true", CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key -> "true", - "spark.sql.extendedExplainProvider" -> 
"org.apache.comet.ExtendedExplainInfo") { + EXTENDED_EXPLAIN_PROVIDERS_KEY -> "org.apache.comet.ExtendedExplainInfo") { val table = "test" withTable(table) { sql(s"create table $table(c0 int, c1 int , c2 float) using parquet") diff --git a/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala b/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala index 8e05bf26a..112d35b13 100644 --- a/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala +++ b/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala @@ -247,9 +247,7 @@ abstract class CometTestBase expectedInfo: Set[String]): Unit = { var expected: Array[Row] = Array.empty var dfSpark: Dataset[Row] = null - withSQLConf( - CometConf.COMET_ENABLED.key -> "false", - "spark.sql.extendedExplainProvider" -> "") { + withSQLConf(CometConf.COMET_ENABLED.key -> "false", EXTENDED_EXPLAIN_PROVIDERS_KEY -> "") { dfSpark = Dataset.ofRows(spark, df.logicalPlan) expected = dfSpark.collect() } @@ -259,7 +257,7 @@ abstract class CometTestBase dfSpark.queryExecution.explainString(ExtendedMode), dfComet.queryExecution.explainString(ExtendedMode)) if (supportsExtendedExplainInfo(dfSpark.queryExecution)) { - assert(diff.contains(expectedInfo)) + assert(expectedInfo.forall(s => diff.contains(s))) } val extendedInfo = new ExtendedExplainInfo().generateExtendedInfo(dfComet.queryExecution.executedPlan)