From 8e3689cffa648d290e1215e7bd81f1824f9b01bc Mon Sep 17 00:00:00 2001
From: Andy Grove
Date: Mon, 6 May 2024 08:26:12 -0600
Subject: [PATCH] fix regression

---
 .../apache/comet/ExtendedExplainInfo.scala    |  2 +-
 .../apache/comet/CometExpressionSuite.scala   | 21 +++++++++++----------
 .../org/apache/spark/sql/CometTestBase.scala  |  4 ++--
 3 files changed, 14 insertions(+), 13 deletions(-)

diff --git a/spark/src/main/scala/org/apache/comet/ExtendedExplainInfo.scala b/spark/src/main/scala/org/apache/comet/ExtendedExplainInfo.scala
index befa3baef..8e5aee8b6 100644
--- a/spark/src/main/scala/org/apache/comet/ExtendedExplainInfo.scala
+++ b/spark/src/main/scala/org/apache/comet/ExtendedExplainInfo.scala
@@ -32,7 +32,7 @@ class ExtendedExplainInfo extends ExtendedExplainGenerator {
 
   override def generateExtendedInfo(plan: SparkPlan): String = {
     val info = extensionInfo(plan)
-    info.mkString("\n").trim
+    info.toSeq.sorted.mkString("\n").trim
   }
 
   private def getActualPlan(node: TreeNode[_]): TreeNode[_] = {
diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
index c8c7ffd5c..83041b982 100644
--- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
@@ -1382,29 +1382,30 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
       Seq(
         (
           s"SELECT cast(make_interval(c0, c1, c0, c1, c0, c0, c2) as string) as C from $table",
-          "make_interval is not supported"),
+          Set("make_interval is not supported")),
         (
           "SELECT " +
             "date_part('YEAR', make_interval(c0, c1, c0, c1, c0, c0, c2))" +
             " + " +
             "date_part('MONTH', make_interval(c0, c1, c0, c1, c0, c0, c2))" +
             s" as yrs_and_mths from $table",
-          "extractintervalyears is not supported\n" +
-            "extractintervalmonths is not supported"),
+          Set(
+            "extractintervalyears is not supported",
+            "extractintervalmonths is not supported")),
         (
           s"SELECT sum(c0), sum(c2) from $table group by c1",
-          "Native shuffle is not enabled\n" +
-            "AQEShuffleRead is not supported"),
+          Set("Native shuffle is not enabled", "AQEShuffleRead is not supported")),
         (
           "SELECT A.c1, A.sum_c0, A.sum_c2, B.casted from " +
            s"(SELECT c1, sum(c0) as sum_c0, sum(c2) as sum_c2 from $table group by c1) as A, " +
            s"(SELECT c1, cast(make_interval(c0, c1, c0, c1, c0, c0, c2) as string) as casted from $table) as B " +
            "where A.c1 = B.c1 ",
-          "Native shuffle is not enabled\n" +
-            "AQEShuffleRead is not supported\n" +
-            "make_interval is not supported\n" +
-            "BroadcastExchange is not supported\n" +
-            "BroadcastHashJoin disabled because not all child plans are native"))
+          Set(
+            "Native shuffle is not enabled",
+            "AQEShuffleRead is not supported",
+            "make_interval is not supported",
+            "BroadcastExchange is not supported",
+            "BroadcastHashJoin disabled because not all child plans are native")))
         .foreach(test => {
           val qry = test._1
           val expected = test._2
diff --git a/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala b/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
index 8fda13617..fb174a5d3 100644
--- a/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
@@ -244,7 +244,7 @@ abstract class CometTestBase
 
   protected def checkSparkAnswerAndCompareExplainPlan(
       df: DataFrame,
-      expectedInfo: String): Unit = {
+      expectedInfo: Set[String]): Unit = {
     var expected: Array[Row] = Array.empty
     var dfSpark: Dataset[Row] = null
     withSQLConf(
@@ -263,7 +263,7 @@ abstract class CometTestBase
     }
     val extendedInfo =
       new ExtendedExplainInfo().generateExtendedInfo(dfComet.queryExecution.executedPlan)
-    assert(extendedInfo.equalsIgnoreCase(expectedInfo))
+    assert(extendedInfo.equalsIgnoreCase(expectedInfo.toSeq.sorted.mkString("\n")))
   }
 
   private var _spark: SparkSession = _
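
For context, a minimal standalone sketch (not part of the patch) of the idea behind the fix: the fallback reasons are collected into a Set, whose iteration order is not guaranteed, so the rendered extended explain text could vary between runs and make the string comparison in the tests flaky. Sorting before joining makes the output deterministic, which is why the tests now pass an unordered Set[String] and compare against its sorted, newline-joined form. The object and value names below are illustrative only and do not exist in the Comet codebase.

// Illustrative sketch: deterministic rendering of an unordered set of reasons.
object SortedExplainInfoDemo {

  // Hypothetical stand-in for the reasons gathered from a plan.
  val reasons: Set[String] =
    Set("make_interval is not supported", "AQEShuffleRead is not supported")

  // Mirrors the patched generateExtendedInfo: sort, then join with newlines,
  // so the result is the same regardless of the set's iteration order.
  def render(info: Set[String]): String =
    info.toSeq.sorted.mkString("\n").trim

  def main(args: Array[String]): Unit = {
    println(render(reasons))
    // Prints, in a stable order:
    // AQEShuffleRead is not supported
    // make_interval is not supported
  }
}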