diff --git a/common/src/main/scala/org/apache/comet/CometConf.scala b/common/src/main/scala/org/apache/comet/CometConf.scala
index 1b40c7cd0..9b5cd242f 100644
--- a/common/src/main/scala/org/apache/comet/CometConf.scala
+++ b/common/src/main/scala/org/apache/comet/CometConf.scala
@@ -401,9 +401,17 @@ object CometConf extends ShimCometConf {
     .booleanConf
     .createWithDefault(false)
 
+  val COMET_REGEXP_ALLOW_INCOMPATIBLE: ConfigEntry[Boolean] =
+    conf("spark.comet.regexp.allowIncompatible")
+      .doc("Comet is not currently fully compatible with Spark for all regular expressions. " +
+        "Set this config to true to allow them anyway using Rust's regular expression engine. " +
+        "See compatibility guide for more information.")
+      .booleanConf
+      .createWithDefault(false)
+
   val COMET_XXHASH64_ENABLED: ConfigEntry[Boolean] =
     conf("spark.comet.xxhash64.enabled")
       .doc("The xxhash64 implementation is not optimized yet and may cause performance issues.")
       .booleanConf
       .createWithDefault(false)
diff --git a/core/Cargo.toml b/core/Cargo.toml
index fe74b3554..7e6017027 100644
--- a/core/Cargo.toml
+++ b/core/Cargo.toml
@@ -136,3 +136,7 @@ harness = false
 [[bench]]
 name = "shuffle_writer"
 harness = false
+
+[[bench]]
+name = "regexp"
+harness = false
diff --git a/core/benches/regexp.rs b/core/benches/regexp.rs
new file mode 100644
index 000000000..ba5e7002e
--- /dev/null
+++ b/core/benches/regexp.rs
@@ -0,0 +1,75 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
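+
+// Compares Comet's RLike expression against DataFusion's LikeExpr evaluated
+// over the same batch. Note that LikeExpr is a plain (non-regex) LIKE here,
+// so the second timing is a rough baseline rather than an exact
+// apples-to-apples comparison of regex engines.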
+
+use std::sync::Arc;
+use arrow::datatypes::Int32Type;
+use arrow::error::ArrowError;
+use arrow_array::{builder::StringBuilder, builder::StringDictionaryBuilder, RecordBatch};
+use arrow_schema::{DataType, Field, Schema};
+use comet::execution::datafusion::expressions::regexp::RLike;
+use criterion::{criterion_group, criterion_main, Criterion};
+use datafusion::common::ScalarValue;
+use datafusion_physical_expr::{
+    expressions::{Column, LikeExpr, Literal},
+    PhysicalExpr,
+};
+
+fn criterion_benchmark(c: &mut Criterion) {
+    let batch = create_utf8_batch().unwrap();
+    let child_expr = Arc::new(Column::new("foo", 0));
+    let pattern_expr = Arc::new(Literal::new(ScalarValue::Utf8(Some("5[0-9]5".to_string()))));
+    let rlike = RLike::new(child_expr.clone(), pattern_expr.clone());
+    let df_rlike = LikeExpr::new(false, false, child_expr, pattern_expr);
+
+    let mut group = c.benchmark_group("regexp");
+    group.bench_function("regexp_comet_rlike", |b| {
+        b.iter(|| rlike.evaluate(&batch).unwrap());
+    });
+    group.bench_function("regexp_datafusion_rlike", |b| {
+        b.iter(|| df_rlike.evaluate(&batch).unwrap());
+    });
+}
+
+fn create_utf8_batch() -> Result<RecordBatch, ArrowError> {
+    let schema = Arc::new(Schema::new(vec![
+        Field::new("a", DataType::Utf8, true),
+        Field::new(
+            "b",
+            DataType::Dictionary(Box::new(DataType::Int32), Box::new(DataType::Utf8)),
+            true,
+        ),
+    ]));
+    let mut string_builder = StringBuilder::new();
+    let mut string_dict_builder = StringDictionaryBuilder::<Int32Type>::new();
+    for i in 0..1000 {
+        if i % 10 == 0 {
+            string_builder.append_null();
+            string_dict_builder.append_null();
+        } else {
+            string_builder.append_value(format!("{}", i));
+            string_dict_builder.append_value(format!("{}", i));
+        }
+    }
+    let string_array = string_builder.finish();
+    let string_dict_array = string_dict_builder.finish();
+    RecordBatch::try_new(
+        schema.clone(),
+        vec![Arc::new(string_array), Arc::new(string_dict_array)],
+    )
+}
+
+fn config() -> Criterion {
+    Criterion::default()
+}
+
+criterion_group! {
+    name = benches;
+    config = config();
+    targets = criterion_benchmark
+}
+criterion_main!(benches);
diff --git a/core/src/execution/datafusion/expressions/mod.rs b/core/src/execution/datafusion/expressions/mod.rs
index 5d5f58e0c..d18f41deb 100644
--- a/core/src/execution/datafusion/expressions/mod.rs
+++ b/core/src/execution/datafusion/expressions/mod.rs
@@ -34,6 +34,7 @@ pub mod bloom_filter_might_contain;
 pub mod correlation;
 pub mod covariance;
 pub mod negative;
+pub mod regexp;
 pub mod stats;
 pub mod stddev;
 pub mod strings;
diff --git a/core/src/execution/datafusion/expressions/regexp.rs b/core/src/execution/datafusion/expressions/regexp.rs
new file mode 100644
index 000000000..8226bf148
--- /dev/null
+++ b/core/src/execution/datafusion/expressions/regexp.rs
@@ -0,0 +1,205 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use crate::{errors::CometError, execution::datafusion::expressions::utils::down_cast_any_ref};
+use arrow_array::{builder::BooleanBuilder, Array, RecordBatch, StringArray};
+use arrow_schema::{DataType, Schema};
+use datafusion::logical_expr::ColumnarValue;
+use datafusion_common::ScalarValue;
+use datafusion_physical_expr::PhysicalExpr;
+use regex::Regex;
+use std::{
+    any::Any,
+    fmt::{Display, Formatter},
+    hash::Hasher,
+    sync::Arc,
+};
+
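+/// Spark-compatible RLIKE expression, backed by the Rust `regex` crate. The
+/// pattern must be a string literal. Rust regular expressions differ from
+/// Java's in some cases (e.g. no backreferences), which is why this expression
+/// is only planned when `spark.comet.regexp.allowIncompatible=true`.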
+#[derive(Debug, Hash)]
+pub struct RLike {
+    child: Arc<dyn PhysicalExpr>,
+    pattern: Arc<dyn PhysicalExpr>,
+}
+
+impl RLike {
+    pub fn new(child: Arc<dyn PhysicalExpr>, pattern: Arc<dyn PhysicalExpr>) -> Self {
+        Self { child, pattern }
+    }
+}
+
+impl Display for RLike {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "RLike [child: {}, pattern: {}] ",
+            self.child, self.pattern
+        )
+    }
+}
+
+impl PartialEq<dyn Any> for RLike {
+    fn eq(&self, other: &dyn Any) -> bool {
+        down_cast_any_ref(other)
+            .downcast_ref::<RLike>()
+            .map(|x| self.child.eq(&x.child) && self.pattern.eq(&x.pattern))
+            .unwrap_or(false)
+    }
+}
+
+impl PhysicalExpr for RLike {
+    fn as_any(&self) -> &dyn Any {
+        self
+    }
+
+    fn data_type(&self, _input_schema: &Schema) -> datafusion_common::Result<DataType> {
+        Ok(DataType::Boolean)
+    }
+
+    fn nullable(&self, input_schema: &Schema) -> datafusion_common::Result<bool> {
+        self.child.nullable(input_schema)
+    }
+
+    fn evaluate(&self, batch: &RecordBatch) -> datafusion_common::Result<ColumnarValue> {
+        if let ColumnarValue::Array(v) = self.child.evaluate(batch)? {
+            if let ColumnarValue::Scalar(ScalarValue::Utf8(Some(pattern))) =
+                self.pattern.evaluate(batch)?
+            {
+                // TODO cache Regex across invocations of evaluate() or create it in constructor
+                match Regex::new(&pattern) {
+                    Ok(re) => {
+                        // unpack dictionary-encoded string input into a plain string
+                        // array so a single matching code path can be used below
+                        let v = if matches!(v.data_type(), DataType::Dictionary(_, _)) {
+                            arrow::compute::cast(&v, &DataType::Utf8)?
+                        } else {
+                            v
+                        };
+                        let inputs = v
+                            .as_any()
+                            .downcast_ref::<StringArray>()
+                            .expect("string array");
+                        let mut builder = BooleanBuilder::with_capacity(inputs.len());
+                        if inputs.is_nullable() {
+                            for i in 0..inputs.len() {
+                                if inputs.is_null(i) {
+                                    builder.append_null();
+                                } else {
+                                    builder.append_value(re.is_match(inputs.value(i)));
+                                }
+                            }
+                        } else {
+                            for i in 0..inputs.len() {
+                                builder.append_value(re.is_match(inputs.value(i)));
+                            }
+                        }
+                        Ok(ColumnarValue::Array(Arc::new(builder.finish())))
+                    }
+                    Err(e) => Err(CometError::Internal(format!(
+                        "Failed to compile regular expression: {e:?}"
+                    ))
+                    .into()),
+                }
+            } else {
+                Err(
+                    CometError::Internal("Only scalar regex patterns are supported".to_string())
+                        .into(),
+                )
+            }
+        } else {
+            // this should be unreachable because Spark will evaluate regex expressions against
+            // literal strings as part of query planning
+            Err(CometError::Internal("Only columnar inputs are supported".to_string()).into())
+        }
+    }
+
+    fn children(&self) -> Vec<&Arc<dyn PhysicalExpr>> {
+        // both the input and the pattern are reported so that with_new_children
+        // can reconstruct the expression from two children
+        vec![&self.child, &self.pattern]
+    }
+
+    fn with_new_children(
+        self: Arc<Self>,
+        children: Vec<Arc<dyn PhysicalExpr>>,
+    ) -> datafusion_common::Result<Arc<dyn PhysicalExpr>> {
+        assert!(children.len() == 2);
+        Ok(Arc::new(RLike::new(
+            children[0].clone(),
+            children[1].clone(),
+        )))
+    }
+
+    fn dyn_hash(&self, state: &mut dyn Hasher) {
+        use std::hash::Hash;
+        let mut s = state;
+        self.hash(&mut s);
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use std::sync::Arc;
+    use arrow_array::builder::{StringBuilder, StringDictionaryBuilder};
+    use arrow_array::types::Int32Type;
+    use arrow_array::{Array, BooleanArray, RecordBatch};
+    use arrow_schema::{ArrowError, DataType, Field, Schema};
+    use datafusion_common::{DataFusionError, ScalarValue};
+    use datafusion_expr::ColumnarValue;
+    use datafusion_physical_expr::expressions::Literal;
+    use datafusion_physical_expr_common::expressions::column::Column;
+    use datafusion_physical_expr_common::physical_expr::PhysicalExpr;
+    use super::*;
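+
+    // the fixture batch stores the same values twice: column 0 as a plain Utf8
+    // array and column 1 as a dictionary-encoded string array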
+
+    #[test]
+    fn test_string_input() -> Result<(), DataFusionError> {
+        do_test(0, "5[0-9]5", 10)
+    }
+
+    #[test]
+    fn test_dict_encoded_string_input() -> Result<(), DataFusionError> {
+        do_test(1, "5[0-9]5", 10)
+    }
+
+    fn do_test(column: usize, pattern: &str, expected_count: usize) -> Result<(), DataFusionError> {
+        let batch = create_utf8_batch()?;
+        let child_expr = Arc::new(Column::new("foo", column));
+        let pattern_expr = Arc::new(Literal::new(ScalarValue::Utf8(Some(pattern.to_string()))));
+        let rlike = RLike::new(child_expr, pattern_expr);
+        if let ColumnarValue::Array(array) = rlike.evaluate(&batch).unwrap() {
+            let array = array
+                .as_any()
+                .downcast_ref::<BooleanArray>()
+                .expect("boolean array");
+            assert_eq!(expected_count, array.true_count());
+        } else {
+            unreachable!()
+        }
+        Ok(())
+    }
+
+    fn create_utf8_batch() -> Result<RecordBatch, ArrowError> {
+        let schema = Arc::new(Schema::new(vec![
+            Field::new("a", DataType::Utf8, true),
+            Field::new(
+                "b",
+                DataType::Dictionary(Box::new(DataType::Int32), Box::new(DataType::Utf8)),
+                true,
+            ),
+        ]));
+        let mut string_builder = StringBuilder::new();
+        let mut string_dict_builder = StringDictionaryBuilder::<Int32Type>::new();
+        for i in 0..1000 {
+            if i % 10 == 0 {
+                string_builder.append_null();
+                string_dict_builder.append_null();
+            } else {
+                string_builder.append_value(format!("{}", i));
+                string_dict_builder.append_value(format!("{}", i));
+            }
+        }
+        let string_array = string_builder.finish();
+        let string_dict_array = string_dict_builder.finish();
+        RecordBatch::try_new(
+            schema.clone(),
+            vec![Arc::new(string_array), Arc::new(string_dict_array)],
+        )
+    }
+}
\ No newline at end of file
diff --git a/core/src/execution/datafusion/expressions/strings.rs b/core/src/execution/datafusion/expressions/strings.rs
index cbbd4cfa4..39b7f7b93 100644
--- a/core/src/execution/datafusion/expressions/strings.rs
+++ b/core/src/execution/datafusion/expressions/strings.rs
@@ -143,8 +143,6 @@ make_predicate_function!(EndsWith, ends_with_dyn, ends_with_utf8_scalar_dyn);
 
 make_predicate_function!(Contains, contains_dyn, contains_utf8_scalar_dyn);
 
-// make_predicate_function!(RLike, rlike_dyn, rlike_utf8_scalar_dyn);
-
 #[derive(Debug, Hash)]
 pub struct SubstringExec {
     pub child: Arc<dyn PhysicalExpr>,
diff --git a/core/src/execution/datafusion/planner.rs b/core/src/execution/datafusion/planner.rs
index cd9822d66..01462bd9b 100644
--- a/core/src/execution/datafusion/planner.rs
+++ b/core/src/execution/datafusion/planner.rs
@@ -71,6 +71,7 @@ use crate::{
         covariance::Covariance,
         if_expr::IfExpr,
         negative,
+        regexp::RLike,
         scalar_funcs::create_comet_physical_fun,
         stats::StatsType,
         stddev::Stddev,
@@ -435,6 +436,12 @@ impl PhysicalPlanner {
                 Ok(Arc::new(Like::new(left, right)))
             }
+            ExprStruct::Rlike(expr) => {
+                let left = self.create_expr(expr.left.as_ref().unwrap(), input_schema.clone())?;
+                let right = self.create_expr(expr.right.as_ref().unwrap(), input_schema)?;
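+
+                // the JVM side only serializes RLike when the pattern is a string
+                // literal, so `right` is expected to evaluate to a scalar here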
+                Ok(Arc::new(RLike::new(left, right)))
+            }
             ExprStruct::CheckOverflow(expr) => {
                 let child = self.create_expr(expr.child.as_ref().unwrap(), input_schema)?;
                 let data_type = to_arrow_datatype(expr.datatype.as_ref().unwrap());
diff --git a/core/src/execution/proto/expr.proto b/core/src/execution/proto/expr.proto
index 093b07b3c..292d7454f 100644
--- a/core/src/execution/proto/expr.proto
+++ b/core/src/execution/proto/expr.proto
@@ -54,7 +54,7 @@ message Expr {
     StartsWith startsWith = 27;
     EndsWith endsWith = 28;
     Contains contains = 29;
-    // RLike rlike = 30;
+    RLike rlike = 30;
     ScalarFunc scalarFunc = 31;
     EqualNullSafe eqNullSafe = 32;
     NotEqualNullSafe neqNullSafe = 33;
@@ -374,10 +374,10 @@ message Like {
   Expr right = 2;
 }
 
-// message RLike {
-//   Expr left = 1;
-//   Expr right = 2;
-// }
+message RLike {
+  Expr left = 1;
+  Expr right = 2;
+}
 
 message StartsWith {
   Expr left = 1;
diff --git a/docs/source/user-guide/compatibility.md b/docs/source/user-guide/compatibility.md
index a16fd1b21..9791a169c 100644
--- a/docs/source/user-guide/compatibility.md
+++ b/docs/source/user-guide/compatibility.md
@@ -32,6 +32,13 @@ be used in production.
 
 There is an [epic](https://github.com/apache/datafusion-comet/issues/313) where we are tracking the work to fully implement ANSI support.
 
+## Regular Expressions
+
+Comet uses the [regex](https://crates.io/crates/regex) crate to evaluate regular expressions, and it is expected that
+this will produce different results from Java's regular expression engine in some cases. It also lacks support for
+features such as backreferences. For these reasons, regular expression support is disabled by default and can be
+enabled by setting `spark.comet.regexp.allowIncompatible=true`.
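+
+For example, with a hypothetical table `names` containing a string column `name`, a query such as the following can then run natively in Comet:
+
+```scala
+// enable Rust-based regular expression evaluation for this session
+spark.conf.set("spark.comet.regexp.allowIncompatible", "true")
+spark.sql("SELECT name FROM names WHERE name RLIKE 'R[a-z]+s [Rr]ose'").show()
+```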
Any known compatibility ## String Functions -| Expression | Notes | -| --------------- | ----------------------------------------------------------------------------------------------------------- | -| Ascii | | -| BitLength | | -| Chr | | -| ConcatWs | | -| Contains | | -| EndsWith | | -| InitCap | | -| Instr | | -| Length | | -| Like | | -| Lower | | -| OctetLength | | -| Repeat | Negative argument for number of times to repeat causes exception | -| Replace | | -| Reverse | | -| StartsWith | | -| StringSpace | | -| StringTrim | | -| StringTrimBoth | | -| StringTrimLeft | | -| StringTrimRight | | -| Substring | | -| Translate | | -| Upper | | +| Expression | Notes | +| --------------- | ------------------------------------------------------------------ | +| Ascii | | +| BitLength | | +| Chr | | +| ConcatWs | | +| Contains | | +| EndsWith | | +| InitCap | | +| Instr | | +| Length | | +| Like | | +| Lower | | +| OctetLength | | +| Repeat | Negative argument for number of times to repeat causes exception | +| Replace | | +| Reverse | | +| RLike | RLike is disabled by default. See compatibility guide for details. | +| StartsWith | | +| StringSpace | | +| StringTrim | | +| StringTrimBoth | | +| StringTrimLeft | | +| StringTrimRight | | +| Substring | | +| Translate | | +| Upper | | ## Date/Time Functions diff --git a/docs/templates/compatibility-template.md b/docs/templates/compatibility-template.md index 64f871354..9aecef1c8 100644 --- a/docs/templates/compatibility-template.md +++ b/docs/templates/compatibility-template.md @@ -32,6 +32,13 @@ be used in production. There is an [epic](https://github.com/apache/datafusion-comet/issues/313) where we are tracking the work to fully implement ANSI support. +## Regular Expressions + +Comet uses the [regex](https://crates.io/crates/regex) crate to evaluate regular expressions, and it is expected that +this will produce different results to Java's regular expression engine in some cases. It also lacks support for +features such as backreferences. For these reasons, regular expression support is disabled by default and can be +enabled by setting `spark.comet.regexp.allowIncompatible=true`. 
+
 ## Cast
 
 Cast operations in Comet fall into three levels of support:
diff --git a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
index c1c8b5c56..51331e0e7 100644
--- a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
+++ b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
@@ -1101,24 +1101,46 @@ object QueryPlanSerde extends Logging with ShimQueryPlanSerde with CometExprShim
           None
         }
 
-      // TODO waiting for arrow-rs update
-//      case RLike(left, right) =>
-//        val leftExpr = exprToProtoInternal(left, inputs)
-//        val rightExpr = exprToProtoInternal(right, inputs)
-//
-//        if (leftExpr.isDefined && rightExpr.isDefined) {
-//          val builder = ExprOuterClass.RLike.newBuilder()
-//          builder.setLeft(leftExpr.get)
-//          builder.setRight(rightExpr.get)
-//
-//          Some(
-//            ExprOuterClass.Expr
-//              .newBuilder()
-//              .setRlike(builder)
-//              .build())
-//        } else {
-//          None
-//        }
+      case RLike(left, right) =>
+        // for now, we assume that all regular expressions are incompatible with Spark but
+        // later we can add logic to determine if a pattern will produce the same results
+        // in Rust, or even transpile the pattern to work around differences between the JVM
+        // and Rust regular expression engines
+        if (CometConf.COMET_REGEXP_ALLOW_INCOMPATIBLE.get()) {
+
+          // we currently only support scalar regex patterns
+          right match {
+            case Literal(_, DataTypes.StringType) =>
+            // supported
+            case _ =>
+              withInfo(expr, "Only scalar patterns are supported")
+              return None
+          }
+
+          val leftExpr = exprToProtoInternal(left, inputs)
+          val rightExpr = exprToProtoInternal(right, inputs)
+
+          if (leftExpr.isDefined && rightExpr.isDefined) {
+            val builder = ExprOuterClass.RLike.newBuilder()
+            builder.setLeft(leftExpr.get)
+            builder.setRight(rightExpr.get)
+
+            Some(
+              ExprOuterClass.Expr
+                .newBuilder()
+                .setRlike(builder)
+                .build())
+          } else {
+            withInfo(expr, left, right)
+            None
+          }
+        } else {
+          withInfo(
+            expr,
+            "Regular expressions are disabled. " +
" + + s"Set ${CometConf.COMET_REGEXP_ALLOW_INCOMPATIBLE.key}=true to enable them.") + None + } case StartsWith(left, right) => val leftExpr = exprToProtoInternal(left, inputs) diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala index 9a1851a51..1418119b4 100644 --- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala +++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala @@ -599,6 +599,29 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper { } } + test("rlike") { + val table = "rlike_names" + withTable(table) { + sql(s"create table $table(id int, name varchar(20)) using parquet") + sql(s"insert into $table values(1,'James Smith')") + sql(s"insert into $table values(2,'Michael Rose')") + sql(s"insert into $table values(3,'Robert Williams')") + sql(s"insert into $table values(4,'Rames Rose')") + sql(s"insert into $table values(5,'Rames rose')") + + withSQLConf(CometConf.COMET_REGEXP_ALLOW_INCOMPATIBLE.key -> "true") { + val query = sql(s"select id from $table where name rlike 'R[a-z]+s [Rr]ose'") + checkSparkAnswerAndOperator(query) + + // test that we fall back to Spark if the pattern is not a scalar value + val query2 = sql(s"select id from $table where name rlike name") + val (sparkPlan, cometPlan) = checkSparkAnswer(query2) + val explain = new ExtendedExplainInfo().generateExtendedInfo(cometPlan) + assert(explain == "Only scalar patterns are supported") + } + } + } + test("like with custom escape") { val table = "names" withTable(table) {