Skip to content

Commit

Permalink
chore: Fixes for scalafix
Browse files Browse the repository at this point in the history
  • Loading branch information
Steve Vaughan committed May 7, 2024
1 parent 9e13161 commit b9834aa
Show file tree
Hide file tree
Showing 3 changed files with 23 additions and 22 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -334,7 +334,7 @@ object CometShuffleExchangeExec extends ShimCometShuffleExchangeExec {
// seed by hashing will help. Refer to SPARK-21782 for more details.
val partitionId = TaskContext.get().partitionId()
var position = new XORShiftRandom(partitionId).nextInt(numPartitions)
(row: InternalRow) => {
(_: InternalRow) => {
// The HashPartitioner will handle the `mod` by the number of partitions
position += 1
position
Expand Down
41 changes: 21 additions & 20 deletions spark/src/test/scala/org/apache/comet/CometCastSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -867,26 +867,27 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
}

/**
 * Verifies that casting column "a" of `input` to `toType` falls back to Spark
 * when a non-UTC session timezone is configured, by asserting that the
 * extended explain output of the executed plan contains `expectedMessage`.
 */
private def castFallbackTestTimezone(
    input: DataFrame,
    toType: DataType,
    expectedMessage: String): Unit = {
  withTempPath { dir =>
    // Round-trip through Parquet and collapse to one partition so the plan is stable.
    val roundtripped = roundtripParquet(input, dir).coalesce(1)
    roundtripped.createOrReplaceTempView("t")

    withSQLConf(
      SQLConf.ANSI_ENABLED.key -> "false",
      CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true",
      SQLConf.SESSION_LOCAL_TIMEZONE.key -> "America/Los_Angeles") {
      val castDf = roundtripped.withColumn("converted", col("a").cast(toType))
      // Force execution so the executed plan (and any fallback notes) is materialized.
      castDf.collect()
      val explainInfo =
        new ExtendedExplainInfo().generateExtendedInfo(castDf.queryExecution.executedPlan)
      assert(explainInfo.contains(expectedMessage))
    }
  }
}
// TODO: Commented out to silence scalafix's unused-method check, since this helper
// currently has no callers. Either restore it together with a test that uses it,
// or delete it outright rather than keeping dead code.
// private def castFallbackTestTimezone(
// input: DataFrame,
// toType: DataType,
// expectedMessage: String): Unit = {
// withTempPath { dir =>
// val data = roundtripParquet(input, dir).coalesce(1)
// data.createOrReplaceTempView("t")
//
// withSQLConf(
// (SQLConf.ANSI_ENABLED.key, "false"),
// (CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key, "true"),
// (SQLConf.SESSION_LOCAL_TIMEZONE.key, "America/Los_Angeles")) {
// val df = data.withColumn("converted", col("a").cast(toType))
// df.collect()
// val str =
// new ExtendedExplainInfo().generateExtendedInfo(df.queryExecution.executedPlan)
// assert(str.contains(expectedMessage))
// }
// }
// }

private def castTimestampTest(input: DataFrame, toType: DataType) = {
withTempPath { dir =>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -587,7 +587,7 @@ abstract class CometColumnarShuffleSuite extends CometTestBase with AdaptiveSpar
}

test("columnar shuffle on nested array") {
Seq("false", "true").foreach { execEnabled =>
Seq("false", "true").foreach { _ =>
Seq(10, 201).foreach { numPartitions =>
Seq("1.0", "10.0").foreach { ratio =>
withSQLConf(CometConf.COMET_SHUFFLE_PREFER_DICTIONARY_RATIO.key -> ratio) {
Expand Down

0 comments on commit b9834aa

Please sign in to comment.