Skip to content

Commit

Permalink
comet spark test case
Browse files Browse the repository at this point in the history
  • Loading branch information
vaibhawvipul committed Apr 30, 2024
1 parent fe18d81 commit 938b0b3
Showing 1 changed file with 19 additions and 2 deletions.
21 changes: 19 additions & 2 deletions spark/src/test/scala/org/apache/comet/CometCastSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -113,8 +113,8 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
withSQLConf(
(SQLConf.SESSION_LOCAL_TIMEZONE.key -> "UTC"),
(CometConf.COMET_CAST_STRING_TO_TIMESTAMP.key -> "true")) {
val values = Seq("2020-01-01T12:34:56.123456", "T2").toDF("a")
castTest(values, DataTypes.TimestampType)
val values = Seq("2020-01-01T12:34:56.123456", "T2", "-9?")
castTimestampTest(values.toDF("a"), DataTypes.TimestampType)
}
}

Expand Down Expand Up @@ -156,6 +156,23 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
}

/**
 * Checks that Comet matches Spark when casting string values to a timestamp
 * type, for both `cast` with ANSI mode disabled and `try_cast` — both of
 * which are expected to produce null for inputs that cannot be parsed.
 *
 * @param input  DataFrame with a single string column named "a" containing
 *               candidate timestamp strings (valid and invalid)
 * @param toType target type of the cast (e.g. `DataTypes.TimestampType`)
 */
private def castTimestampTest(input: DataFrame, toType: DataType): Unit = {
  withTempPath { dir =>
    // Round-trip through Parquet (presumably so values are read back via a
    // Parquet scan — see roundtripParquet) and coalesce to one partition.
    val data = roundtripParquet(input, dir).coalesce(1)
    data.createOrReplaceTempView("t")

    withSQLConf((SQLConf.ANSI_ENABLED.key, "false")) {
      // cast() should return null for invalid inputs when ansi mode is disabled
      val df = data.withColumn("converted", col("a").cast(toType))
      checkSparkAnswer(df)

      // try_cast() should always return null for invalid inputs
      val df2 = spark.sql(s"select try_cast(a as ${toType.sql}) from t")
      checkSparkAnswer(df2)
    }
  }
}

private def castTest(input: DataFrame, toType: DataType): Unit = {
withTempPath { dir =>
val data = roundtripParquet(input, dir).coalesce(1)
Expand Down

0 comments on commit 938b0b3

Please sign in to comment.