feat: Implement Spark unhex #342
Changes from 19 commits
@@ -0,0 +1,229 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

use std::sync::Arc;

use arrow_array::OffsetSizeTrait;
use arrow_schema::DataType;
use datafusion::logical_expr::ColumnarValue;
use datafusion_common::{cast::as_generic_string_array, exec_err, DataFusionError, ScalarValue};

/// Helper function to convert a hex digit to a binary value.
fn unhex_digit(c: u8) -> Result<u8, DataFusionError> {
    match c {
        b'0'..=b'9' => Ok(c - b'0'),
        b'A'..=b'F' => Ok(10 + c - b'A'),
        b'a'..=b'f' => Ok(10 + c - b'a'),
        _ => Err(DataFusionError::Execution(
            "Input to unhex_digit is not a valid hex digit".to_string(),
        )),
    }
}
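
Note: each accepted character maps to a 4-bit nibble, so two hex digits combine into one output byte. A minimal sketch of the mapping, assuming the `unhex_digit` above is in scope:

    assert_eq!(unhex_digit(b'7').unwrap(), 7);   // '0'..='9' map to 0..=9
    assert_eq!(unhex_digit(b'B').unwrap(), 11);  // 'A'..='F' map to 10..=15
    assert_eq!(unhex_digit(b'f').unwrap(), 15);  // 'a'..='f' map to 10..=15
    assert!(unhex_digit(b'G').is_err());         // everything else is rejected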

/// Convert a hex string to binary and store the result in `result`. Returns an error if the input
/// is not a valid hex string.
fn unhex(hex_str: &str, result: &mut Vec<u8>) -> Result<(), DataFusionError> {
    let bytes = hex_str.as_bytes();

    let mut i = 0;

    if (bytes.len() & 0x01) != 0 {
        let v = unhex_digit(bytes[0])?;

        result.push(v);
        i += 1;
    }

    while i < bytes.len() {
        let first = unhex_digit(bytes[i])?;
        let second = unhex_digit(bytes[i + 1])?;
        result.push((first << 4) | second);

        i += 2;
    }

    Ok(())
}
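
Note: the odd-length branch above consumes the first digit on its own, which is equivalent to left-padding the input with '0' (matching Spark's padding behavior, as the `test_odd_length` test below verifies). A small sketch, assuming the `unhex` above is in scope:

    let mut out = Vec::new();
    unhex("F05", &mut out).unwrap();    // odd length: 'F' becomes its own byte
    assert_eq!(out, vec![0x0F, 0x05]);  // so "F05" decodes like "0F05"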

fn spark_unhex_inner<T: OffsetSizeTrait>(
    array: &ColumnarValue,
    fail_on_error: bool,
) -> Result<ColumnarValue, DataFusionError> {
    match array {
        ColumnarValue::Array(array) => {
            let string_array = as_generic_string_array::<T>(array)?;

            let mut encoded = Vec::new();
            let mut builder = arrow::array::BinaryBuilder::new();

            for item in string_array.iter() {
                if let Some(s) = item {
                    if unhex(s, &mut encoded).is_ok() {
                        builder.append_value(encoded.as_slice());
                        encoded.clear();
                    } else if fail_on_error {
                        return exec_err!("Input to unhex is not a valid hex string: {s}");
                    } else {
                        // Discard any partially decoded bytes so they don't
                        // leak into the next row's output
                        encoded.clear();
                        builder.append_null();

Review comment: All good, thank you very much for looking. I think you're right in that there was a bug, which should be fixed and tested in c5c3fcd.

                    }
                } else {
                    builder.append_null();
                }
            }
            Ok(ColumnarValue::Array(Arc::new(builder.finish())))
        }
        ColumnarValue::Scalar(ScalarValue::Utf8(Some(string))) => {
            let mut encoded = Vec::new();

            if unhex(string, &mut encoded).is_ok() {
                Ok(ColumnarValue::Scalar(ScalarValue::Binary(Some(encoded))))
            } else if fail_on_error {
                exec_err!("Input to unhex is not a valid hex string: {string}")
            } else {
                Ok(ColumnarValue::Scalar(ScalarValue::Binary(None)))
            }
        }
        ColumnarValue::Scalar(ScalarValue::Utf8(None)) => {
            Ok(ColumnarValue::Scalar(ScalarValue::Binary(None)))
        }
        _ => {
            exec_err!(
                "The first argument must be a string scalar or array, but got: {:?}",
                array
            )
        }
    }
}

pub(super) fn spark_unhex(args: &[ColumnarValue]) -> Result<ColumnarValue, DataFusionError> {
    if args.len() > 2 {
        return exec_err!("unhex takes at most 2 arguments, but got: {}", args.len());
    }

    let val_to_unhex = &args[0];
    let fail_on_error = if args.len() == 2 {
        match &args[1] {
            ColumnarValue::Scalar(ScalarValue::Boolean(Some(fail_on_error))) => *fail_on_error,
            _ => {
                return exec_err!(
                    "The second argument must be boolean scalar, but got: {:?}",
                    args[1]
                );
            }
        }
    } else {
        false
    };

    match val_to_unhex.data_type() {
        DataType::Utf8 => spark_unhex_inner::<i32>(val_to_unhex, fail_on_error),
        DataType::LargeUtf8 => spark_unhex_inner::<i64>(val_to_unhex, fail_on_error),
        other => exec_err!(
            "The first argument must be a Utf8 or LargeUtf8: {:?}",
            other
        ),
    }
}
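
Note: a hedged usage sketch of the entry point above; the byte values follow from the hex digits, and the error-handling contract is the one implemented in `spark_unhex_inner`:

    // One-argument form: fail_on_error defaults to false, so invalid input yields NULL.
    let input = ColumnarValue::Scalar(ScalarValue::Utf8(Some("537061726B".to_string())));
    let result = spark_unhex(&[input]).unwrap();
    // "537061726B" decodes to the bytes of "Spark":
    // ColumnarValue::Scalar(ScalarValue::Binary(Some(vec![0x53, 0x70, 0x61, 0x72, 0x6B])))

    // Two-argument form: a true boolean scalar turns invalid input into an execution error.
    let bad = ColumnarValue::Scalar(ScalarValue::Utf8(Some("zz".to_string())));
    let flag = ColumnarValue::Scalar(ScalarValue::Boolean(Some(true)));
    assert!(spark_unhex(&[bad, flag]).is_err());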

#[cfg(test)]
mod test {
    use std::sync::Arc;

    use arrow_array::make_array;
    use arrow_data::ArrayData;
    use datafusion::logical_expr::ColumnarValue;

    use super::unhex;

    #[test]
    fn test_spark_unhex_null() -> Result<(), Box<dyn std::error::Error>> {
        let input = ArrayData::new_null(&arrow_schema::DataType::Utf8, 2);
        let output = ArrayData::new_null(&arrow_schema::DataType::Binary, 2);

        let input = ColumnarValue::Array(Arc::new(make_array(input)));
        let expected = ColumnarValue::Array(Arc::new(make_array(output)));

        let result = super::spark_unhex(&[input])?;

        match (result, expected) {
            (ColumnarValue::Array(result), ColumnarValue::Array(expected)) => {
                assert_eq!(*result, *expected);
                Ok(())
            }
            _ => Err("Unexpected result type".into()),
        }
    }

    #[test]
    fn test_unhex_valid() -> Result<(), Box<dyn std::error::Error>> {
        let mut result = Vec::new();

        unhex("537061726B2053514C", &mut result)?;

Review comment: Could we also have a test for the case where the input is padded?

        let result_str = std::str::from_utf8(&result)?;
        assert_eq!(result_str, "Spark SQL");
        result.clear();

        unhex("1C", &mut result)?;
        assert_eq!(result, vec![28]);
        result.clear();

        unhex("737472696E67", &mut result)?;
        assert_eq!(result, "string".as_bytes());
        result.clear();

        unhex("1", &mut result)?;
        assert_eq!(result, vec![1]);
        result.clear();

        Ok(())
    }

    #[test]
    fn test_odd_length() -> Result<(), Box<dyn std::error::Error>> {
        let mut result = Vec::new();

        unhex("A1B", &mut result)?;
        assert_eq!(result, vec![10, 27]);
        result.clear();

        unhex("0A1B", &mut result)?;
        assert_eq!(result, vec![10, 27]);
        result.clear();

        Ok(())
    }

    #[test]
    fn test_unhex_empty() {
        let mut result = Vec::new();

        // Empty hex string
        unhex("", &mut result).unwrap();
        assert!(result.is_empty());
    }

    #[test]
    fn test_unhex_invalid() {
        let mut result = Vec::new();

        // Invalid hex strings
        assert!(unhex("##", &mut result).is_err());
        assert!(unhex("G123", &mut result).is_err());
        assert!(unhex("hello", &mut result).is_err());
        assert!(unhex("\0", &mut result).is_err());
    }
}

@@ -1301,24 +1301,26 @@ impl PhysicalPlanner {
             .iter()
             .map(|x| x.data_type(input_schema.as_ref()))
             .collect::<Result<Vec<_>, _>>()?;

         let data_type = match expr.return_type.as_ref().map(to_arrow_datatype) {
             Some(t) => t,
             None => {
                 // If no data type is provided from Spark, we'll use DF's return type from the
                 // scalar function
                 // Note this assumes the `fun_name` is a defined function in DF. Otherwise, it'll
                 // throw error.
-                let fun = BuiltinScalarFunction::from_str(fun_name);
-                if fun.is_err() {
+                if let Ok(fun) = BuiltinScalarFunction::from_str(fun_name) {

Review comment: Unrelated, but more idiomatic IMO

+                    fun.return_type(&input_expr_types)?
+                } else {
                     self.session_ctx
                         .udf(fun_name)?
                         .inner()
                         .return_type(&input_expr_types)?
-                } else {
-                    fun?.return_type(&input_expr_types)?
                 }
             }
         };

         let fun_expr =
             create_comet_physical_fun(fun_name, data_type.clone(), &self.session_ctx.state())?;
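
Note: the review comment above refers to the `if let Ok(..)` rewrite; as a minimal, generic sketch of the idiom (the function names here are hypothetical stand-ins, not Comet APIs), it binds the success value directly instead of testing `is_err()` and then unwrapping the same `Result` again:

    fn builtin(name: &str) -> Result<u32, String> {
        name.parse().map_err(|_| "not a built-in".to_string())
    }
    fn udf_fallback(_name: &str) -> Result<u32, String> { Ok(42) }  // hypothetical fallback path

    fn resolve(name: &str) -> Result<u32, String> {
        if let Ok(t) = builtin(name) {
            Ok(t)               // happy path: no second lookup, no unwrap
        } else {
            udf_fallback(name)  // fall back only when the built-in lookup fails
        }
    }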

@@ -45,12 +45,13 @@ import org.apache.comet.CometSparkSessionExtensions.{isCometOperatorEnabled, isC
 import org.apache.comet.serde.ExprOuterClass.{AggExpr, DataType => ProtoDataType, Expr, ScalarFunc}
 import org.apache.comet.serde.ExprOuterClass.DataType.{DataTypeInfo, DecimalInfo, ListInfo, MapInfo, StructInfo}
 import org.apache.comet.serde.OperatorOuterClass.{AggregateMode => CometAggregateMode, JoinType, Operator}
+import org.apache.comet.shims.CometExprShim
 import org.apache.comet.shims.ShimQueryPlanSerde

 /**
  * An utility object for query plan and expression serialization.
  */
-object QueryPlanSerde extends Logging with ShimQueryPlanSerde {
+object QueryPlanSerde extends Logging with ShimQueryPlanSerde with CometExprShim {
   def emitWarning(reason: String): Unit = {
     logWarning(s"Comet native execution is disabled due to: $reason")
   }

@@ -1396,6 +1397,16 @@ object QueryPlanSerde extends Logging with ShimQueryPlanSerde {
val optExpr = scalarExprToProto("atan2", leftExpr, rightExpr) | ||
optExprWithInfo(optExpr, expr, left, right) | ||
|
||
case e: Unhex if !isSpark32 => | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Unrelated, but potentially we can vote in the community when to deprecate 3.2 support... |
||
val unHex = unhexSerde(e) | ||
|
||
val childExpr = exprToProtoInternal(unHex._1, inputs) | ||
val failOnErrorExpr = exprToProtoInternal(unHex._2, inputs) | ||
|
||
val optExpr = | ||
scalarExprToProtoWithReturnType("unhex", e.dataType, childExpr, failOnErrorExpr) | ||
optExprWithInfo(optExpr, expr, unHex._1) | ||
|
||
case e @ Ceil(child) => | ||
val childExpr = exprToProtoInternal(child, inputs) | ||
child.dataType match { | ||
|