Commit 3251f9a: resolving merge conflicts

vaibhawvipul committed May 7, 2024
2 parents 904430e + 8bba58e
Showing 16 changed files with 559 additions and 185 deletions.
4 changes: 2 additions & 2 deletions README.md
@@ -69,8 +69,8 @@ Linux, Apple OSX (Intel and M1)

## Getting started

-See the [DataFusion Comet User Guide](https://datafusion.apache.org/comet/user-guide/) for installation instructions.
+See the [DataFusion Comet User Guide](https://datafusion.apache.org/comet/user-guide/installation.html) for installation instructions.

## Contributing
See the [DataFusion Comet Contribution Guide](https://datafusion.apache.org/comet/contributor-guide/contributing.html)
-for information on how to get started contributing to the project.
+for information on how to get started contributing to the project.
1 change: 1 addition & 0 deletions core/src/execution/datafusion/expressions/mod.rs
@@ -29,6 +29,7 @@ pub mod avg_decimal;
pub mod bloom_filter_might_contain;
pub mod covariance;
pub mod stats;
pub mod stddev;
pub mod strings;
pub mod subquery;
pub mod sum_decimal;
179 changes: 179 additions & 0 deletions core/src/execution/datafusion/expressions/stddev.rs
@@ -0,0 +1,179 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

use std::{any::Any, sync::Arc};

use crate::execution::datafusion::expressions::{
    stats::StatsType, utils::down_cast_any_ref, variance::VarianceAccumulator,
};
use arrow::{
    array::ArrayRef,
    datatypes::{DataType, Field},
};
use datafusion::logical_expr::Accumulator;
use datafusion_common::{internal_err, Result, ScalarValue};
use datafusion_physical_expr::{expressions::format_state_name, AggregateExpr, PhysicalExpr};

/// STDDEV and STDDEV_SAMP (standard deviation) aggregate expression
/// The implementation is mostly the same as DataFusion's. The reason we have our own
/// implementation is that DataFusion uses UInt64 for the `count` state field, while
/// Spark uses Double. We have also added `null_on_divide_by_zero` to be consistent
/// with Spark's implementation.
#[derive(Debug)]
pub struct Stddev {
    name: String,
    expr: Arc<dyn PhysicalExpr>,
    stats_type: StatsType,
    null_on_divide_by_zero: bool,
}

impl Stddev {
    /// Create a new STDDEV aggregate function
    pub fn new(
        expr: Arc<dyn PhysicalExpr>,
        name: impl Into<String>,
        data_type: DataType,
        stats_type: StatsType,
        null_on_divide_by_zero: bool,
    ) -> Self {
        // The result of stddev is only supported as Float64.
        assert!(matches!(data_type, DataType::Float64));
        Self {
            name: name.into(),
            expr,
            stats_type,
            null_on_divide_by_zero,
        }
    }
}

impl AggregateExpr for Stddev {
    /// Return a reference to Any that can be used for downcasting
    fn as_any(&self) -> &dyn Any {
        self
    }

    fn field(&self) -> Result<Field> {
        Ok(Field::new(&self.name, DataType::Float64, true))
    }

    fn create_accumulator(&self) -> Result<Box<dyn Accumulator>> {
        Ok(Box::new(StddevAccumulator::try_new(
            self.stats_type,
            self.null_on_divide_by_zero,
        )?))
    }

    fn create_sliding_accumulator(&self) -> Result<Box<dyn Accumulator>> {
        Ok(Box::new(StddevAccumulator::try_new(
            self.stats_type,
            self.null_on_divide_by_zero,
        )?))
    }

    fn state_fields(&self) -> Result<Vec<Field>> {
        Ok(vec![
            Field::new(
                format_state_name(&self.name, "count"),
                DataType::Float64,
                true,
            ),
            Field::new(
                format_state_name(&self.name, "mean"),
                DataType::Float64,
                true,
            ),
            Field::new(format_state_name(&self.name, "m2"), DataType::Float64, true),
        ])
    }

    fn expressions(&self) -> Vec<Arc<dyn PhysicalExpr>> {
        vec![self.expr.clone()]
    }

    fn name(&self) -> &str {
        &self.name
    }
}

impl PartialEq<dyn Any> for Stddev {
    fn eq(&self, other: &dyn Any) -> bool {
        down_cast_any_ref(other)
            .downcast_ref::<Self>()
            .map(|x| {
                self.name == x.name
                    && self.expr.eq(&x.expr)
                    && self.null_on_divide_by_zero == x.null_on_divide_by_zero
                    && self.stats_type == x.stats_type
            })
            .unwrap_or(false)
    }
}

/// An accumulator to compute the standard deviation
#[derive(Debug)]
pub struct StddevAccumulator {
    variance: VarianceAccumulator,
}

impl StddevAccumulator {
    /// Creates a new `StddevAccumulator`
    pub fn try_new(s_type: StatsType, null_on_divide_by_zero: bool) -> Result<Self> {
        Ok(Self {
            variance: VarianceAccumulator::try_new(s_type, null_on_divide_by_zero)?,
        })
    }

    pub fn get_m2(&self) -> f64 {
        self.variance.get_m2()
    }
}

impl Accumulator for StddevAccumulator {
    fn state(&mut self) -> Result<Vec<ScalarValue>> {
        Ok(vec![
            ScalarValue::from(self.variance.get_count()),
            ScalarValue::from(self.variance.get_mean()),
            ScalarValue::from(self.variance.get_m2()),
        ])
    }

    fn update_batch(&mut self, values: &[ArrayRef]) -> Result<()> {
        self.variance.update_batch(values)
    }

    fn retract_batch(&mut self, values: &[ArrayRef]) -> Result<()> {
        self.variance.retract_batch(values)
    }

    fn merge_batch(&mut self, states: &[ArrayRef]) -> Result<()> {
        self.variance.merge_batch(states)
    }

    fn evaluate(&mut self) -> Result<ScalarValue> {
        let variance = self.variance.evaluate()?;
        match variance {
            ScalarValue::Float64(Some(e)) => Ok(ScalarValue::Float64(Some(e.sqrt()))),
            ScalarValue::Float64(None) => Ok(ScalarValue::Float64(None)),
            _ => internal_err!("Variance should be f64"),
        }
    }

    fn size(&self) -> usize {
        std::mem::align_of_val(self) - std::mem::align_of_val(&self.variance) + self.variance.size()
    }
}
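
The `count`/`mean`/`m2` triple declared in `state_fields` is the standard Welford online-variance state, and `evaluate` is simply the square root of the variance result. The sketch below is a minimal, self-contained illustration of that recurrence, not Comet's code: `WelfordState` is a hypothetical stand-in for the `VarianceAccumulator` that the file delegates to, and the divide-by-zero fallback is a simplified reading of the Spark-compatible behavior that `null_on_divide_by_zero` selects.

```rust
/// Hypothetical stand-in for the count/mean/m2 bookkeeping that
/// `VarianceAccumulator` performs on behalf of `StddevAccumulator`.
#[derive(Debug, Default)]
struct WelfordState {
    count: f64, // Spark models the count as a Double, hence f64 rather than u64
    mean: f64,  // running mean of the values seen so far
    m2: f64,    // running sum of squared deviations from the running mean
}

impl WelfordState {
    fn update(&mut self, value: f64) {
        self.count += 1.0;
        let delta = value - self.mean;
        self.mean += delta / self.count;
        self.m2 += delta * (value - self.mean);
    }

    /// stddev_samp divides m2 by (n - 1); stddev_pop divides by n.
    fn stddev(&self, sample: bool, null_on_divide_by_zero: bool) -> Option<f64> {
        let divisor = if sample { self.count - 1.0 } else { self.count };
        if divisor <= 0.0 {
            // Assumed fallback: NULL (None) when the flag is set, NaN otherwise.
            return if null_on_divide_by_zero { None } else { Some(f64::NAN) };
        }
        Some((self.m2 / divisor).sqrt())
    }
}

fn main() {
    let mut state = WelfordState::default();
    for v in [1.0_f64, 2.0, 3.0, 4.0] {
        state.update(v);
    }
    // After these four values: count = 4.0, mean = 2.5, m2 = 5.0.
    println!("{:?}", state.stddev(true, true));  // Some(1.2909...) = sqrt(5/3)
    println!("{:?}", state.stddev(false, true)); // Some(1.1180...) = sqrt(5/4)
}
```

Carrying all three fields through the shuffle is what allows `merge_batch` to combine per-partition states exactly instead of re-scanning the input.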
2 changes: 0 additions & 2 deletions core/src/execution/datafusion/expressions/variance.rs
@@ -15,8 +15,6 @@
// specific language governing permissions and limitations
// under the License.

-//! Defines physical expressions that can be evaluated at runtime during query execution
use std::{any::Any, sync::Arc};

use crate::execution::datafusion::expressions::{stats::StatsType, utils::down_cast_any_ref};
25 changes: 25 additions & 0 deletions core/src/execution/datafusion/planner.rs
@@ -71,6 +71,7 @@ use crate::{
        if_expr::IfExpr,
        scalar_funcs::create_comet_physical_fun,
        stats::StatsType,
        stddev::Stddev,
        strings::{Contains, EndsWith, Like, StartsWith, StringSpaceExec, SubstringExec},
        subquery::Subquery,
        sum_decimal::SumDecimal,
@@ -1260,6 +1261,30 @@ impl PhysicalPlanner {
                    ))),
                }
            }
            AggExprStruct::Stddev(expr) => {
                let child = self.create_expr(expr.child.as_ref().unwrap(), schema.clone())?;
                let datatype = to_arrow_datatype(expr.datatype.as_ref().unwrap());
                match expr.stats_type {
                    0 => Ok(Arc::new(Stddev::new(
                        child,
                        "stddev",
                        datatype,
                        StatsType::Sample,
                        expr.null_on_divide_by_zero,
                    ))),
                    1 => Ok(Arc::new(Stddev::new(
                        child,
                        "stddev_pop",
                        datatype,
                        StatsType::Population,
                        expr.null_on_divide_by_zero,
                    ))),
                    stats_type => Err(ExecutionError::GeneralError(format!(
                        "Unknown StatisticsType {:?} for stddev",
                        stats_type
                    ))),
                }
            }
        }
    }

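Because the plan arrives as protobuf, `expr.stats_type` is a raw `i32` at this point (prost-style generated code keeps enum fields open as integers), which is why the planner matches the literal tags: `0` builds the sample flavor registered under the name `stddev`, `1` the population flavor under `stddev_pop`. A standalone sketch of the same dispatch pattern, with stand-in types rather than Comet's own:

```rust
// Stand-in enum mirroring Comet's StatsType; the tag values
// (0 = sample, 1 = population) are taken from the match above.
#[derive(Debug, Clone, Copy, PartialEq)]
enum StatsType {
    Sample,
    Population,
}

fn stats_type_from_tag(tag: i32) -> Result<StatsType, String> {
    match tag {
        0 => Ok(StatsType::Sample),
        1 => Ok(StatsType::Population),
        // Fail loudly on tags this version does not know about.
        other => Err(format!("Unknown StatisticsType {other:?} for stddev")),
    }
}

fn main() {
    assert_eq!(stats_type_from_tag(0), Ok(StatsType::Sample));
    assert_eq!(stats_type_from_tag(1), Ok(StatsType::Population));
    assert!(stats_type_from_tag(7).is_err());
}
```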
8 changes: 8 additions & 0 deletions core/src/execution/proto/expr.proto
@@ -95,6 +95,7 @@ message AggExpr {
    CovSample covSample = 12;
    CovPopulation covPopulation = 13;
    Variance variance = 14;
    Stddev stddev = 15;
  }
}

@@ -178,6 +179,13 @@ message Variance {
  StatisticsType stats_type = 4;
}

message Stddev {
  Expr child = 1;
  bool null_on_divide_by_zero = 2;
  DataType datatype = 3;
  StatisticsType stats_type = 4;
}

message Literal {
  oneof value {
    bool bool_val = 1;
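On the Rust side this message is consumed through generated code, so the new `Stddev` message should surface roughly as the struct sketched below. This is an assumption about the prost output based on its usual conventions (message fields become `Option`s, enum fields stay `i32`), not the actual generated code; `Expr` and `DataType` are stubbed here so the sketch compiles on its own.

```rust
// Stub types standing in for the generated Expr and DataType messages.
#[derive(Clone, PartialEq, Debug, Default)]
pub struct Expr {}
#[derive(Clone, PartialEq, Debug, Default)]
pub struct DataType {}

/// Rough expected shape of the generated struct for `message Stddev`.
#[derive(Clone, PartialEq, Debug, Default)]
pub struct Stddev {
    pub child: Option<Box<Expr>>,     // message field -> Option (boxed when recursive)
    pub null_on_divide_by_zero: bool, // scalar field maps directly
    pub datatype: Option<DataType>,   // message field -> Option
    pub stats_type: i32,              // enum fields stay open as i32 on the wire
}

fn main() {
    let msg = Stddev {
        child: Some(Box::new(Expr::default())),
        null_on_divide_by_zero: true,
        datatype: Some(DataType::default()),
        stats_type: 0, // sample statistics
    };
    // Mirrors how planner.rs consumes the message.
    let _child = msg.child.as_ref().unwrap();
    assert_eq!(msg.stats_type, 0);
}
```

This open-enum representation is also why planner.rs matches `stats_type` against raw `0` and `1` rather than a Rust enum.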
18 changes: 15 additions & 3 deletions docs/source/user-guide/compatibility-template.md
@@ -44,7 +44,19 @@ Cast operations in Comet fall into three levels of support:
- **Unsupported**: Comet does not provide a native version of this cast expression and the query stage will fall back to
Spark.

-The following table shows the current cast operations supported by Comet. Any cast that does not appear in this
-table (such as those involving complex types and timestamp_ntz, for example) is not supported by Comet.
### Compatible Casts

-<!--CAST_TABLE-->
The following cast operations are generally compatible with Spark except for the differences noted here.

<!--COMPAT_CAST_TABLE-->

### Incompatible Casts

The following cast operations are not compatible with Spark for all inputs and are disabled by default.

<!--INCOMPAT_CAST_TABLE-->

### Unsupported Casts

Any cast not listed in the previous tables is currently unsupported. We are working on adding more. See the
[tracking issue](https://github.com/apache/datafusion-comet/issues/286) for more details.