Commit

address review comments
kazuyukitanimura committed May 16, 2024
1 parent f5380e5 commit 1bde6ea
Showing 2 changed files with 6 additions and 5 deletions.
@@ -40,6 +40,11 @@ trait ShimCometSparkSessionExtensions {
    */
   def getOffset(limit: LimitExec): Int = getOffsetOpt(limit).getOrElse(0)
 
+  /**
+   * TODO: delete after dropping Spark 3.x support and directly call
+   * SQLConf.EXTENDED_EXPLAIN_PROVIDERS.key
+   */
+  protected val EXTENDED_EXPLAIN_PROVIDERS_KEY = "spark.sql.extendedExplainProviders"
 }
 
 object ShimCometSparkSessionExtensions {
@@ -64,6 +69,4 @@ object ShimCometSparkSessionExtensions {
     }
     true
   }
-
-  protected val EXTENDED_EXPLAIN_PROVIDERS_KEY = "spark.sql.extendedExplainProviders"
 }
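
For context: moving the constant into the trait lets anything that mixes in ShimCometSparkSessionExtensions (such as CometTestBase in the second changed file below) refer to EXTENDED_EXPLAIN_PROVIDERS_KEY unqualified, instead of reaching into the companion object. Below is a minimal sketch, not part of this patch, of what the TODO is pointing at, assuming the SQLConf.EXTENDED_EXPLAIN_PROVIDERS constant that the TODO names:

import org.apache.spark.sql.internal.SQLConf

// Hypothetical Spark-4.x-only version of the shim: once Spark 3.x support is
// dropped, the hard-coded string can be replaced by the SQLConf constant so
// the key stays in sync with Spark itself.
trait ShimCometSparkSessionExtensions {
  protected val EXTENDED_EXPLAIN_PROVIDERS_KEY: String =
    SQLConf.EXTENDED_EXPLAIN_PROVIDERS.key
}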
@@ -247,9 +247,7 @@ abstract class CometTestBase
       expectedInfo: Set[String]): Unit = {
     var expected: Array[Row] = Array.empty
     var dfSpark: Dataset[Row] = null
-    withSQLConf(
-      CometConf.COMET_ENABLED.key -> "false",
-      EXTENDED_EXPLAIN_PROVIDERS_KEY -> "") {
+    withSQLConf(CometConf.COMET_ENABLED.key -> "false", EXTENDED_EXPLAIN_PROVIDERS_KEY -> "") {
       dfSpark = Dataset.ofRows(spark, df.logicalPlan)
       expected = dfSpark.collect()
     }
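
Side note on the hunk above (collapsing withSQLConf onto one line is purely a formatting change): withSQLConf overrides the listed keys only for the duration of the block and restores them afterwards, so the collected rows form a vanilla-Spark baseline with Comet disabled and extended explain providers cleared. A rough sketch of that idea, assuming the surrounding CometTestBase context (spark, df, withSQLConf); how expected is consumed afterwards is not shown in this hunk:

var expected: Array[Row] = Array.empty
// Comet is turned off and extended explain providers are cleared only inside
// this block, so the rows reflect plain Spark behaviour.
withSQLConf(CometConf.COMET_ENABLED.key -> "false", EXTENDED_EXPLAIN_PROVIDERS_KEY -> "") {
  expected = Dataset.ofRows(spark, df.logicalPlan).collect()
}
// Once the block exits, both settings revert to their previous values and
// expected can be compared against a Comet-enabled run of the same plan.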
