From cc1905adea2454e7cb5bf4f62bcdf7513a0eed7f Mon Sep 17 00:00:00 2001
From: Andy Grove
Date: Sat, 20 Apr 2024 09:29:07 -0600
Subject: [PATCH] Fix compilation issue with Spark 3.2 and 3.3

---
 .../org/apache/comet/serde/QueryPlanSerde.scala | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

diff --git a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
index 79b00e13e..cae1d6b37 100644
--- a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
+++ b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
@@ -415,19 +415,18 @@ object QueryPlanSerde extends Logging with ShimQueryPlanSerde {
       timeZoneId: Option[String],
       dt: DataType,
       childExpr: Option[Expr],
-      evalMode: EvalMode.Value): Option[Expr] = {
+      evalMode: String): Option[Expr] = {
     val dataType = serializeDataType(dt)
 
     if (childExpr.isDefined && dataType.isDefined) {
       val castBuilder = ExprOuterClass.Cast.newBuilder()
       castBuilder.setChild(childExpr.get)
       castBuilder.setDatatype(dataType.get)
+      castBuilder.setEvalMode(evalMode)
 
       val timeZone = timeZoneId.getOrElse("UTC")
       castBuilder.setTimezone(timeZone)
 
-      castBuilder.setEvalMode(evalMode.toString)
-
       Some(
         ExprOuterClass.Expr
           .newBuilder()
@@ -451,7 +450,14 @@ object QueryPlanSerde extends Logging with ShimQueryPlanSerde {
 
       case Cast(child, dt, timeZoneId, evalMode) =>
         val childExpr = exprToProtoInternal(child, inputs)
-        castToProto(timeZoneId, dt, childExpr, evalMode)
+        val evalModeStr = if (evalMode.isInstanceOf[Boolean]) {
+          // Spark 3.2 & 3.3 has ansiEnabled boolean
+          if (evalMode.asInstanceOf[Boolean]) "ANSI" else "LEGACY"
+        } else {
+          // Spark 3.4+ has EvalMode enum with values LEGACY, ANSI, and TRY
+          evalMode.toString
+        }
+        castToProto(timeZoneId, dt, childExpr, evalModeStr)
 
       case add @ Add(left, right, _) if supportedDataType(left.dataType) =>
         val leftExpr = exprToProtoInternal(left, inputs)
@@ -1568,7 +1574,7 @@ object QueryPlanSerde extends Logging with ShimQueryPlanSerde {
         val childExpr = scalarExprToProto("coalesce", exprChildren: _*)
         // TODO: Remove this once we have new DataFusion release which includes
         // the fix: https://github.com/apache/arrow-datafusion/pull/9459
-        castToProto(None, a.dataType, childExpr, EvalMode.LEGACY)
+        castToProto(None, a.dataType, childExpr, "LEGACY")
 
       // With Spark 3.4, CharVarcharCodegenUtils.readSidePadding gets called to pad spaces for
       // char types. Use rpad to achieve the behavior.
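
Note (not part of the patch above): a minimal standalone sketch of the version-dependent evalMode handling that the second hunk introduces. Spark 3.2/3.3 construct Cast with an ansiEnabled: Boolean, while Spark 3.4+ pass an EvalMode enum value, so a runtime type check maps either shape to the string stored in the protobuf Cast message. EvalModeStub and evalModeToString below are hypothetical names used only for this illustration; the real patch reads the value straight off the Cast expression.

// Standalone sketch, not part of the patch. EvalModeStub stands in for
// Spark 3.4's org.apache.spark.sql.catalyst.expressions.EvalMode.
object EvalModeCompatExample {

  // Stand-in for the Spark 3.4+ enum with values LEGACY, ANSI, and TRY.
  object EvalModeStub extends Enumeration {
    val LEGACY, ANSI, TRY = Value
  }

  // Mirrors the runtime type check in the patch: Spark 3.2/3.3 pass
  // ansiEnabled as a Boolean, Spark 3.4+ pass an EvalMode value.
  def evalModeToString(evalMode: Any): String = evalMode match {
    case ansiEnabled: Boolean => if (ansiEnabled) "ANSI" else "LEGACY"
    case other => other.toString
  }

  def main(args: Array[String]): Unit = {
    println(evalModeToString(true))             // ANSI   (Spark 3.2/3.3, ANSI on)
    println(evalModeToString(false))            // LEGACY (Spark 3.2/3.3, ANSI off)
    println(evalModeToString(EvalModeStub.TRY)) // TRY    (Spark 3.4+)
  }
}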