From e844a0e39db66dff2128773e3bb4010508f89918 Mon Sep 17 00:00:00 2001 From: Kazuyuki Tanimura Date: Wed, 15 May 2024 14:56:43 -0700 Subject: [PATCH] test: Fix explain with extended info comet test --- .../test/scala/org/apache/comet/CometExpressionSuite.scala | 2 +- spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala index dbb46e078..40d15281b 100644 --- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala +++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala @@ -1399,7 +1399,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper { CometConf.COMET_EXEC_ENABLED.key -> "true", CometConf.COMET_SHUFFLE_ENFORCE_MODE_ENABLED.key -> "true", CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key -> "true", - "spark.sql.extendedExplainProvider" -> "org.apache.comet.ExtendedExplainInfo") { + "spark.sql.extendedExplainProviders" -> "org.apache.comet.ExtendedExplainInfo") { val table = "test" withTable(table) { sql(s"create table $table(c0 int, c1 int , c2 float) using parquet") diff --git a/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala b/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala index 8e05bf26a..31c53508c 100644 --- a/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala +++ b/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala @@ -249,7 +249,7 @@ abstract class CometTestBase var dfSpark: Dataset[Row] = null withSQLConf( CometConf.COMET_ENABLED.key -> "false", - "spark.sql.extendedExplainProvider" -> "") { + "spark.sql.extendedExplainProviders" -> "") { dfSpark = Dataset.ofRows(spark, df.logicalPlan) expected = dfSpark.collect() } @@ -259,7 +259,7 @@ abstract class CometTestBase dfSpark.queryExecution.explainString(ExtendedMode), 
dfComet.queryExecution.explainString(ExtendedMode)) if (supportsExtendedExplainInfo(dfSpark.queryExecution)) { - assert(diff.contains(expectedInfo)) + assert(expectedInfo.exists(s => diff.contains(s))) } val extendedInfo = new ExtendedExplainInfo().generateExtendedInfo(dfComet.queryExecution.executedPlan)