From 938b0b3a131c9d53cffff20fed7f73832b00e288 Mon Sep 17 00:00:00 2001
From: Vipul Vaibhaw
Date: Tue, 30 Apr 2024 12:36:06 +0530
Subject: [PATCH] comet spark test case

---
 .../org/apache/comet/CometCastSuite.scala     | 21 +++++++++++++++++--
 1 file changed, 19 insertions(+), 2 deletions(-)

diff --git a/spark/src/test/scala/org/apache/comet/CometCastSuite.scala b/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
index c5243c13b..06d937836 100644
--- a/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
@@ -113,8 +113,8 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
     withSQLConf(
       (SQLConf.SESSION_LOCAL_TIMEZONE.key -> "UTC"),
       (CometConf.COMET_CAST_STRING_TO_TIMESTAMP.key -> "true")) {
-      val values = Seq("2020-01-01T12:34:56.123456", "T2").toDF("a")
-      castTest(values, DataTypes.TimestampType)
+      val values = Seq("2020-01-01T12:34:56.123456", "T2", "-9?")
+      castTimestampTest(values.toDF("a"), DataTypes.TimestampType)
     }
   }
 
@@ -156,6 +156,23 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
     }
   }
 
+  private def castTimestampTest(input: DataFrame, toType: DataType): Unit = {
+    withTempPath { dir =>
+      val data = roundtripParquet(input, dir).coalesce(1)
+      data.createOrReplaceTempView("t")
+
+      withSQLConf((SQLConf.ANSI_ENABLED.key, "false")) {
+        // cast() should return null for invalid inputs when ansi mode is disabled
+        val df = data.withColumn("converted", col("a").cast(toType))
+        checkSparkAnswer(df)
+
+        // try_cast() should always return null for invalid inputs
+        val df2 = spark.sql(s"select try_cast(a as ${toType.sql}) from t")
+        checkSparkAnswer(df2)
+      }
+    }
+  }
+
   private def castTest(input: DataFrame, toType: DataType): Unit = {
     withTempPath { dir =>
       val data = roundtripParquet(input, dir).coalesce(1)