Skip to content

Commit

Permalink
Add support for negative integers and modulo handling for integers above …
Browse files Browse the repository at this point in the history
…256; also add a test case
  • Loading branch information
vaibhawvipul committed Jun 11, 2024
1 parent 351d0ff commit cb2924f
Show file tree
Hide file tree
Showing 2 changed files with 30 additions and 5 deletions.
18 changes: 13 additions & 5 deletions core/src/execution/datafusion/expressions/scalar_funcs/chr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,10 +44,18 @@ pub fn chr(args: &[ArrayRef]) -> Result<ArrayRef> {
.iter()
.map(|integer: Option<i64>| {
integer
.map(|integer| match core::char::from_u32(integer as u32) {
Some(integer) => Ok(integer.to_string()),
None => {
exec_err!("requested character too large for encoding.")
.map(|integer| {
let adjusted_integer = if integer >= 0 { integer % 256 } else { integer };

match core::char::from_u32(adjusted_integer as u32) {
Some(integer) => Ok(integer.to_string()),
None => {
if integer < 0 {
Ok("".to_string())
} else {
exec_err!("requested character not compatible for encoding.")
}
}
}
})
.transpose()
Expand Down Expand Up @@ -110,7 +118,7 @@ fn handle_chr_fn(args: &[ColumnarValue]) -> Result<ColumnarValue> {
Some(ch) => Ok(ColumnarValue::Scalar(ScalarValue::Utf8(Some(
ch.to_string(),
)))),
None => exec_err!("requested character too large for encoding."),
None => exec_err!("requested character was incompatible for encoding."),
}
}
ColumnarValue::Scalar(ScalarValue::Int64(None)) => {
Expand Down
17 changes: 17 additions & 0 deletions spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -997,6 +997,23 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
}

test("Chr with negative and large value") {
  // Exercise chr() with zero, negative, and larger-than-256 inputs, under
  // both plain and dictionary-encoded parquet, so the Comet native path is
  // compared against Spark for every adjusted-codepoint branch.
  for (dict <- Seq(false, true)) {
    withSQLConf(
      "parquet.enable.dictionary" -> dict.toString,
      CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true") {
      val tableName = "test0"
      withTable(tableName) {
        sql(s"create table $tableName(c9 int, c4 int) using parquet")
        // Rows cover: zero, values far outside the 0..255 range (both signs),
        // moderate negatives, and small in-range values.
        sql(
          s"insert into $tableName values(0, 0), (61231231236, -61231231236), (-1700, 1700), (0, -4000), (-40, 40)")
        checkSparkAnswerAndOperator(s"SELECT chr(c9), chr(c4) FROM $tableName")
      }
    }
  }
}

test("InitCap") {
Seq(false, true).foreach { dictionary =>
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
Expand Down

0 comments on commit cb2924f

Please sign in to comment.