Skip to content

Commit

Permalink
skip try_cast testing prior to Spark 3.4
Browse files (browse the repository at this point in the history)
  • Loading branch information
andygrove committed May 2, 2024
1 parent b2d3d2d commit 889c754
Show file tree
Hide file tree
Showing 2 changed files with 18 additions and 11 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -51,10 +51,6 @@ object CometCast {
true
case _ => false
}
case (_: DecimalType, _: DecimalType) =>
// TODO we need to file an issue for adding specific tests for casting
// between decimal types with difference precision and scale
true
case (DataTypes.DoubleType, _: DecimalType) =>
true
case (DataTypes.TimestampType, DataTypes.LongType) =>
Expand All @@ -65,7 +61,9 @@ object CometCast {
true
// END HACK

case (DataTypes.StringType, DataTypes.TimestampType) =>
case (_: DecimalType, _: DecimalType) =>
// TODO we need to file an issue for adding specific tests for casting
// between decimal types with different precision and scale
true
case (DataTypes.StringType, _) =>
canCastFromString(cast, toType)
Expand Down
21 changes: 15 additions & 6 deletions spark/src/test/scala/org/apache/comet/CometCastSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -791,6 +791,11 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}

private def castTest(input: DataFrame, toType: DataType): Unit = {

// we do not support the TryCast expression in Spark 3.2 and 3.3
// https://github.com/apache/datafusion-comet/issues/374
val testTryCast = CometSparkSessionExtensions.isSpark34Plus

withTempPath { dir =>
val data = roundtripParquet(input, dir).coalesce(1)
data.createOrReplaceTempView("t")
Expand All @@ -801,9 +806,11 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
checkSparkAnswerAndOperator(df)

// try_cast() should always return null for invalid inputs
val df2 =
spark.sql(s"select a, try_cast(a as ${toType.sql}) from t order by a")
checkSparkAnswerAndOperator(df2)
if (testTryCast) {
val df2 =
spark.sql(s"select a, try_cast(a as ${toType.sql}) from t order by a")
checkSparkAnswerAndOperator(df2)
}
}

// with ANSI enabled, we should produce the same exception as Spark
Expand Down Expand Up @@ -843,9 +850,11 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}

// try_cast() should always return null for invalid inputs
val df2 =
spark.sql(s"select a, try_cast(a as ${toType.sql}) from t order by a")
checkSparkAnswerAndOperator(df2)
if (testTryCast) {
val df2 =
spark.sql(s"select a, try_cast(a as ${toType.sql}) from t order by a")
checkSparkAnswerAndOperator(df2)
}
}
}
}
Expand Down

0 comments on commit 889c754

Please sign in to comment.