From 1bb6d0467b4ebb021d0b660919f08975d3d73985 Mon Sep 17 00:00:00 2001
From: Andy Grove
Date: Fri, 7 Jun 2024 10:25:09 -0600
Subject: [PATCH] remove COMET_ANSI_MODE_ENABLED

---
 common/src/main/scala/org/apache/comet/CometConf.scala   | 9 ---------
 .../scala/org/apache/comet/serde/QueryPlanSerde.scala    | 7 ++-----
 .../src/test/scala/org/apache/comet/CometCastSuite.scala | 4 +---
 .../scala/org/apache/comet/CometExpressionSuite.scala    | 1 -
 4 files changed, 3 insertions(+), 18 deletions(-)

diff --git a/common/src/main/scala/org/apache/comet/CometConf.scala b/common/src/main/scala/org/apache/comet/CometConf.scala
index 42fb5fb4c..c20071806 100644
--- a/common/src/main/scala/org/apache/comet/CometConf.scala
+++ b/common/src/main/scala/org/apache/comet/CometConf.scala
@@ -383,15 +383,6 @@ object CometConf extends ShimCometConf {
       .toSequence
       .createWithDefault(Seq("Range,InMemoryTableScan"))
 
-  val COMET_ANSI_MODE_ENABLED: ConfigEntry[Boolean] = conf("spark.comet.ansi.enabled")
-    .internal()
-    .doc(
-      "Comet does not respect ANSI mode in most cases and by default will not accelerate " +
-        "queries when ansi mode is enabled. Enable this setting to test Comet's experimental " +
-        "support for ANSI mode. This should not be used in production.")
-    .booleanConf
-    .createWithDefault(COMET_ANSI_MODE_ENABLED_DEFAULT)
-
   val COMET_CAST_ALLOW_INCOMPATIBLE: ConfigEntry[Boolean] =
     conf("spark.comet.cast.allowIncompatible")
       .doc(
diff --git a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
index c53f3248d..1966e7412 100644
--- a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
+++ b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
@@ -700,11 +700,8 @@ object QueryPlanSerde extends Logging with ShimQueryPlanSerde with CometExprShim
       case c @ Cast(child, dt, timeZoneId, _) =>
         handleCast(child, inputs, dt, timeZoneId, evalMode(c))
 
-      case expr if isUnsupportedAnsiExpr(expr) && !CometConf.COMET_ANSI_MODE_ENABLED.get() =>
-        withInfo(
-          expr,
-          "ANSI mode not supported. " +
-            s"Set ${CometConf.COMET_ANSI_MODE_ENABLED.key}=true to enable it anyway.")
+      case expr if isUnsupportedAnsiExpr(expr) =>
+        withInfo(expr, "ANSI mode not supported")
         None
 
       case add @ Add(left, right, _) if supportedDataType(left.dataType) =>
diff --git a/spark/src/test/scala/org/apache/comet/CometCastSuite.scala b/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
index 25343f933..127866165 100644
--- a/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
@@ -965,9 +965,7 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
     }
 
     // with ANSI enabled, we should produce the same exception as Spark
-    withSQLConf(
-      (SQLConf.ANSI_ENABLED.key, "true"),
-      (CometConf.COMET_ANSI_MODE_ENABLED.key, "true")) {
+    withSQLConf((SQLConf.ANSI_ENABLED.key, "true")) {
 
       // cast() should throw exception on invalid inputs when ansi mode is enabled
       val df = data.withColumn("converted", col("a").cast(toType))
diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
index 7516a0785..4260eb8ce 100644
--- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
@@ -1552,7 +1552,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
   def withAnsiMode(enabled: Boolean)(f: => Unit): Unit = {
     withSQLConf(
       SQLConf.ANSI_ENABLED.key -> enabled.toString,
-      CometConf.COMET_ANSI_MODE_ENABLED.key -> enabled.toString,
      CometConf.COMET_ENABLED.key -> "true",
      CometConf.COMET_EXEC_ENABLED.key -> "true")(f)
  }