Skip to content

Commit

Permalink
New unit test is integrated with the legacy API
Browse files Browse the repository at this point in the history
  • Loading branch information
erenavsarogullari committed Feb 25, 2024
1 parent 2cdf0c0 commit 9e1c311
Show file tree
Hide file tree
Showing 2 changed files with 30 additions and 51 deletions.
17 changes: 10 additions & 7 deletions core/src/execution/datafusion/expressions/cast.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,16 +78,18 @@ impl Cast {
let to_type = &self.data_type;
let cast_result = match (from_type, to_type) {
(DataType::Utf8, DataType::Boolean) => Self::spark_cast_utf8_to_boolean::<i32>(&array),
(DataType::LargeUtf8, DataType::Boolean) => Self::spark_cast_utf8_to_boolean::<i64>(&array),
_ => cast_with_options(&array, &self.data_type, &CAST_OPTIONS)?
(DataType::LargeUtf8, DataType::Boolean) => {
Self::spark_cast_utf8_to_boolean::<i64>(&array)
}
_ => cast_with_options(&array, &self.data_type, &CAST_OPTIONS)?,
};
let result = spark_cast(cast_result, from_type, &self.data_type);
Ok(result)
}

fn spark_cast_utf8_to_boolean<OffsetSize>(from: &dyn Array) -> ArrayRef
where
OffsetSize: OffsetSizeTrait,
where
OffsetSize: OffsetSizeTrait,
{
let array = from
.as_any()
Expand All @@ -100,10 +102,11 @@ impl Cast {
Some(value) => match value.to_ascii_lowercase().trim() {
"t" | "true" | "y" | "yes" | "1" => Some(true),
"f" | "false" | "n" | "no" | "0" => Some(false),
_ => None
_ => None,
},
_ => None
}).collect::<BooleanArray>();
_ => None,
})
.collect::<BooleanArray>();

Arc::new(output_array)
}
Expand Down
64 changes: 20 additions & 44 deletions spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,13 @@ package org.apache.comet

import java.util

import scala.collection.convert.ImplicitConversions.`collection AsScalaIterable`

import org.apache.hadoop.fs.Path
import org.apache.spark.sql.{CometTestBase, DataFrame, Row}
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
import org.apache.spark.sql.functions.{col, expr}
import org.apache.spark.sql.functions.expr
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.SESSION_LOCAL_TIMEZONE
import org.apache.spark.sql.types.{DataTypes, Decimal, DecimalType, StructType}
import org.apache.spark.sql.types.{Decimal, DecimalType, StructType}

import org.apache.comet.CometSparkSessionExtensions.{isSpark32, isSpark34Plus}

Expand Down Expand Up @@ -1306,48 +1304,26 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}

test("test cast utf8 to boolean as compatible with Spark") {
withSQLConf(
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key -> "true") {
withTable("test_table1", "test_table2", "test_table3", "test_table4") {
// Supported boolean values as true by both Arrow and Spark
val inputDF = Seq("t", "true", "y", "yes", "1", "T", "TrUe", "Y", "YES").toDF("c1")
inputDF.write.format("parquet").saveAsTable("test_table1")
val resultDF = this.spark
.table("test_table1")
.withColumn("converted", col("c1").cast(DataTypes.BooleanType))
val resultArr = resultDF.collectAsList().toList
resultArr.foreach(x => assert(x.get(1) == true))

// Supported boolean values as false by both Arrow and Spark
val inputDF2 = Seq("f", "false", "n", "no", "0", "F", "FaLSe", "N", "No").toDF("c1")
inputDF2.write.format("parquet").saveAsTable("test_table2")
val resultDF2 = this.spark
.table("test_table2")
.withColumn("converted", col("c1").cast(DataTypes.BooleanType))
val resultArr2 = resultDF2.collectAsList().toList
resultArr2.foreach(x => assert(x.get(1) == false))

// Supported boolean values by Arrow but not Spark
val inputDF3 =
Seq("TR", "FA", "tr", "tru", "ye", "on", "fa", "fal", "fals", "of", "off").toDF("c1")
inputDF3.write.format("parquet").saveAsTable("test_table3")
val resultDF3 = this.spark
.table("test_table3")
.withColumn("converted", col("c1").cast(DataTypes.BooleanType))
val resultArr3 = resultDF3.collectAsList().toList
resultArr3.foreach(x => assert(x.get(1) == null))

// Invalid boolean casting values for Arrow and Spark
val inputDF4 = Seq("car", "Truck").toDF("c1")
inputDF4.write.format("parquet").saveAsTable("test_table4")
val resultDF4 = this.spark
.table("test_table4")
.withColumn("converted", col("c1").cast(DataTypes.BooleanType))
val resultArr4 = resultDF4.collectAsList().toList
resultArr4.foreach(x => assert(x.get(1) == null))
def testConvertedColumn(inputValues: Seq[String]): Unit = {
val table = "test_table"
withTable(table) {
val values = inputValues.map(x => s"('$x')").mkString(",")
sql(s"create table $table(base_column char(20)) using parquet")
sql(s"insert into $table values $values")
checkSparkAnswerAndOperator(
s"select base_column, cast(base_column as boolean) as converted_column from $table")
}
}

// Supported boolean values as true by both Arrow and Spark
testConvertedColumn(inputValues = Seq("t", "true", "y", "yes", "1", "T", "TrUe", "Y", "YES"))
// Supported boolean values as false by both Arrow and Spark
testConvertedColumn(inputValues = Seq("f", "false", "n", "no", "0", "F", "FaLSe", "N", "No"))
// Supported boolean values by Arrow but not Spark
testConvertedColumn(inputValues =
Seq("TR", "FA", "tr", "tru", "ye", "on", "fa", "fal", "fals", "of", "off"))
// Invalid boolean casting values for Arrow and Spark
testConvertedColumn(inputValues = Seq("car", "Truck"))
}

}

0 comments on commit 9e1c311

Please sign in to comment.