-
Notifications
You must be signed in to change notification settings - Fork 169
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
feat: Implement Spark unhex #342
Changes from 11 commits
5dbd4aa
04bb619
6cb88c7
c649aef
bb4ad43
a0bdbbe
70c9ddd
663aef5
bfe92c4
966d307
97eae4b
a378f74
112c7c6
6146f3e
1de0887
bd07fed
36baf8e
d5a1c46
fb1c24a
c5c3fcd
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -52,6 +52,9 @@ use num::{ | |
}; | ||
use unicode_segmentation::UnicodeSegmentation; | ||
|
||
mod unhex; | ||
use unhex::spark_unhex; | ||
|
||
macro_rules! make_comet_scalar_udf { | ||
($name:expr, $func:ident, $data_type:ident) => {{ | ||
let scalar_func = CometScalarFunction::new( | ||
|
@@ -105,6 +108,10 @@ pub fn create_comet_physical_fun( | |
"make_decimal" => { | ||
make_comet_scalar_udf!("make_decimal", spark_make_decimal, data_type) | ||
} | ||
"unhex" => { | ||
let func = Arc::new(spark_unhex); | ||
make_comet_scalar_udf!("unhex", func, without data_type) | ||
} | ||
"decimal_div" => { | ||
make_comet_scalar_udf!("decimal_div", spark_decimal_div, data_type) | ||
} | ||
|
@@ -123,11 +130,10 @@ pub fn create_comet_physical_fun( | |
make_comet_scalar_udf!(spark_func_name, wrapped_func, without data_type) | ||
} | ||
_ => { | ||
let fun = BuiltinScalarFunction::from_str(fun_name); | ||
if fun.is_err() { | ||
Ok(ScalarFunctionDefinition::UDF(registry.udf(fun_name)?)) | ||
if let Ok(fun) = BuiltinScalarFunction::from_str(fun_name) { | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Unrelated, but more idiomatic IMO |
||
Ok(ScalarFunctionDefinition::BuiltIn(fun)) | ||
} else { | ||
Ok(ScalarFunctionDefinition::BuiltIn(fun?)) | ||
Ok(ScalarFunctionDefinition::UDF(registry.udf(fun_name)?)) | ||
} | ||
} | ||
} | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,149 @@ | ||
// Licensed to the Apache Software Foundation (ASF) under one | ||
// or more contributor license agreements. See the NOTICE file | ||
// distributed with this work for additional information | ||
// regarding copyright ownership. The ASF licenses this file | ||
// to you under the Apache License, Version 2.0 (the | ||
// "License"); you may not use this file except in compliance | ||
// with the License. You may obtain a copy of the License at | ||
// | ||
// http://www.apache.org/licenses/LICENSE-2.0 | ||
// | ||
// Unless required by applicable law or agreed to in writing, | ||
// software distributed under the License is distributed on an | ||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY | ||
// KIND, either express or implied. See the License for the | ||
// specific language governing permissions and limitations | ||
// under the License. | ||
|
||
use std::sync::Arc; | ||
|
||
use arrow_array::{Array, OffsetSizeTrait}; | ||
use arrow_schema::DataType; | ||
use datafusion::logical_expr::ColumnarValue; | ||
use datafusion_common::{cast::as_generic_string_array, exec_err, DataFusionError, ScalarValue}; | ||
|
||
fn unhex(string: &str, result: &mut Vec<u8>) -> Result<(), DataFusionError> { | ||
if string.is_empty() { | ||
return Ok(()); | ||
} | ||
|
||
// Adjust the string if it has an odd length, and prepare to add a padding byte if needed. | ||
let needs_padding = string.len() % 2 != 0; | ||
let adjusted_string = if needs_padding { &string[1..] } else { string }; | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. If I understand this correctly, There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Here is the logic in Spark 3.4.2 for handling the first char if the input is padded, for reference. It looks like there is some validation of the first digit that we do not have in this PR and it also looks like the unhexed digit is stored in the output is used in the return value if the length of the input string is 1. It would be good to make sure that we have tests covering this case. if ((bytes.length & 0x01) != 0) {
// padding with '0'
if (bytes(0) < 0) {
return null
}
val v = Hex.unhexDigits(bytes(0))
if (v == -1) {
return null
}
out(0) = v
i += 1
oddShift = 1
} |
||
|
||
let mut iter = adjusted_string.chars().peekable(); | ||
while let (Some(high_char), Some(low_char)) = (iter.next(), iter.next()) { | ||
let high = high_char | ||
.to_digit(16) | ||
.ok_or_else(|| DataFusionError::Internal("Invalid hex character".to_string()))?; | ||
let low = low_char | ||
.to_digit(16) | ||
.ok_or_else(|| DataFusionError::Internal("Invalid hex character".to_string()))?; | ||
|
||
result.push((high << 4 | low) as u8); | ||
} | ||
|
||
if needs_padding { | ||
result.push(0); | ||
} | ||
|
||
Ok(()) | ||
} | ||
|
||
fn spark_unhex_inner<T: OffsetSizeTrait>( | ||
array: &ColumnarValue, | ||
fail_on_error: bool, | ||
) -> Result<ColumnarValue, DataFusionError> { | ||
match array { | ||
ColumnarValue::Array(array) => { | ||
let string_array = as_generic_string_array::<T>(array)?; | ||
|
||
let mut builder = arrow::array::BinaryBuilder::new(); | ||
let mut encoded = Vec::new(); | ||
|
||
for i in 0..string_array.len() { | ||
let string = string_array.value(i); | ||
|
||
if unhex(string, &mut encoded).is_ok() { | ||
builder.append_value(encoded.as_slice()); | ||
encoded.clear(); | ||
} else if fail_on_error { | ||
return exec_err!("Input to unhex is not a valid hex string: {string}"); | ||
} else { | ||
builder.append_null(); | ||
} | ||
} | ||
Ok(ColumnarValue::Array(Arc::new(builder.finish()))) | ||
} | ||
ColumnarValue::Scalar(ScalarValue::Utf8(Some(string))) => { | ||
let mut encoded = Vec::new(); | ||
|
||
if unhex(string, &mut encoded).is_ok() { | ||
Ok(ColumnarValue::Scalar(ScalarValue::Binary(Some(encoded)))) | ||
} else if fail_on_error { | ||
exec_err!("Input to unhex is not a valid hex string: {string}") | ||
} else { | ||
Ok(ColumnarValue::Scalar(ScalarValue::Binary(None))) | ||
} | ||
} | ||
_ => { | ||
exec_err!( | ||
"The first argument must be a string scalar or array, but got: {:?}", | ||
array | ||
) | ||
} | ||
} | ||
} | ||
|
||
pub(super) fn spark_unhex(args: &[ColumnarValue]) -> Result<ColumnarValue, DataFusionError> { | ||
if args.len() > 2 { | ||
return exec_err!("unhex takes at most 2 arguments, but got: {}", args.len()); | ||
} | ||
|
||
let val_to_unhex = &args[0]; | ||
let fail_on_error = if args.len() == 2 { | ||
match &args[1] { | ||
ColumnarValue::Scalar(ScalarValue::Boolean(Some(fail_on_error))) => *fail_on_error, | ||
_ => { | ||
return exec_err!( | ||
"The second argument must be boolean scalar, but got: {:?}", | ||
args[1] | ||
); | ||
} | ||
} | ||
} else { | ||
false | ||
}; | ||
|
||
match val_to_unhex.data_type() { | ||
DataType::Utf8 => spark_unhex_inner::<i32>(val_to_unhex, fail_on_error), | ||
DataType::LargeUtf8 => spark_unhex_inner::<i64>(val_to_unhex, fail_on_error), | ||
other => exec_err!( | ||
"The first argument must be a string scalar or array, but got: {:?}", | ||
other | ||
), | ||
} | ||
} | ||
|
||
#[cfg(test)]
mod test {
    use super::unhex;

    #[test]
    fn test_unhex() -> Result<(), Box<dyn std::error::Error>> {
        let mut decoded = Vec::new();

        // A well-formed, even-length hex string decodes to its UTF-8 text.
        unhex("537061726B2053514C", &mut decoded)?;
        assert_eq!(std::str::from_utf8(&decoded)?, "Spark SQL");

        // Non-hex characters are rejected.
        decoded.clear();
        assert!(unhex("hello", &mut decoded).is_err());

        // The empty string decodes to no bytes at all.
        decoded.clear();
        unhex("", &mut decoded)?;
        assert!(decoded.is_empty());

        Ok(())
    }
}
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -1301,24 +1301,26 @@ impl PhysicalPlanner { | |
.iter() | ||
.map(|x| x.data_type(input_schema.as_ref())) | ||
.collect::<Result<Vec<_>, _>>()?; | ||
|
||
let data_type = match expr.return_type.as_ref().map(to_arrow_datatype) { | ||
Some(t) => t, | ||
None => { | ||
// If no data type is provided from Spark, we'll use DF's return type from the | ||
// scalar function | ||
// Note this assumes the `fun_name` is a defined function in DF. Otherwise, it'll | ||
// throw error. | ||
let fun = BuiltinScalarFunction::from_str(fun_name); | ||
if fun.is_err() { | ||
|
||
if let Ok(fun) = BuiltinScalarFunction::from_str(fun_name) { | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Unrelated, but more idiomatic IMO |
||
fun.return_type(&input_expr_types)? | ||
} else { | ||
self.session_ctx | ||
.udf(fun_name)? | ||
.inner() | ||
.return_type(&input_expr_types)? | ||
} else { | ||
fun?.return_type(&input_expr_types)? | ||
} | ||
} | ||
}; | ||
|
||
let fun_expr = | ||
create_comet_physical_fun(fun_name, data_type.clone(), &self.session_ctx.state())?; | ||
|
||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
/* | ||
* Licensed to the Apache Software Foundation (ASF) under one | ||
* or more contributor license agreements. See the NOTICE file | ||
* distributed with this work for additional information | ||
* regarding copyright ownership. The ASF licenses this file | ||
* to you under the Apache License, Version 2.0 (the | ||
* "License"); you may not use this file except in compliance | ||
* with the License. You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, | ||
* software distributed under the License is distributed on an | ||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY | ||
* KIND, either express or implied. See the License for the | ||
* specific language governing permissions and limitations | ||
* under the License. | ||
*/ | ||
package org.apache.comet.shims | ||
|
||
import org.apache.spark.sql.catalyst.expressions._ | ||
|
||
/**
 * Shim for parsing the `Unhex` expression against Catalyst 3.2.x.
 */
trait ShimCometExpr {

  /**
   * Splits an `Unhex` into its serde pieces: the child expression plus the
   * fail-on-error flag. A literal `false` is supplied here for the flag.
   */
  def unhexSerde(unhex: Unhex): (Expression, Expression) =
    (unhex.child, Literal(false))
}
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
/* | ||
* Licensed to the Apache Software Foundation (ASF) under one | ||
* or more contributor license agreements. See the NOTICE file | ||
* distributed with this work for additional information | ||
* regarding copyright ownership. The ASF licenses this file | ||
* to you under the Apache License, Version 2.0 (the | ||
* "License"); you may not use this file except in compliance | ||
* with the License. You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, | ||
* software distributed under the License is distributed on an | ||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY | ||
* KIND, either express or implied. See the License for the | ||
* specific language governing permissions and limitations | ||
* under the License. | ||
*/ | ||
package org.apache.comet.shims | ||
|
||
import org.apache.spark.sql.catalyst.expressions._ | ||
|
||
/**
 * Shim for parsing the `Unhex` expression against Catalyst 3.3.x.
 */
trait ShimCometExpr {

  /**
   * Splits an `Unhex` into its serde pieces: the child expression plus the
   * fail-on-error flag. A literal `false` is supplied here for the flag.
   */
  def unhexSerde(unhex: Unhex): (Expression, Expression) =
    (unhex.child, Literal(false))
}
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
/* | ||
* Licensed to the Apache Software Foundation (ASF) under one | ||
* or more contributor license agreements. See the NOTICE file | ||
* distributed with this work for additional information | ||
* regarding copyright ownership. The ASF licenses this file | ||
* to you under the Apache License, Version 2.0 (the | ||
* "License"); you may not use this file except in compliance | ||
* with the License. You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, | ||
* software distributed under the License is distributed on an | ||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY | ||
* KIND, either express or implied. See the License for the | ||
* specific language governing permissions and limitations | ||
* under the License. | ||
*/ | ||
package org.apache.comet.shims | ||
|
||
import org.apache.spark.sql.catalyst.expressions._ | ||
|
||
/**
 * Shim for parsing the `Unhex` expression against Catalyst 3.4.x.
 */
trait ShimCometExpr {

  /**
   * Splits an `Unhex` into its serde pieces: the child expression plus the
   * expression's own `failOnError` flag.
   */
  def unhexSerde(unhex: Unhex): (Expression, Expression) =
    (unhex.child, Literal(unhex.failOnError))
}
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Thanks for adding the minor version shims. These are going to help me with some of work around supporting
cast
.