build: Add scala-version to matrix (#396)
* build: Add scala-version to matrix

Add testing of Scala 2.13 alongside 2.12 by adding `scala-version` to the matrix. Explicitly exclude the combination of Spark `3.2` and Scala `2.13` (see the sketch below).
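For context, the mechanism this change leans on is the workflow matrix: adding a `scala-version` axis multiplies every job combination, and an `exclude` entry under `matrix` drops the unsupported cells again. A minimal sketch of just that interaction, trimmed to two axes — this is illustrative only, not the real workflow, which appears in full in the diff below:

```yaml
strategy:
  matrix:
    spark-version: ['3.2', '3.3']
    scala-version: ['2.12', '2.13']
    # Each exclude entry removes every matrix cell that matches all of
    # its keys, so only the Spark 3.2 + Scala 2.13 combination is
    # dropped, leaving three of the four generated jobs.
    exclude:
      - spark-version: '3.2'
        scala-version: '2.13'
  fail-fast: false  # let the remaining combinations finish independently
```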

* Update pr_build.yml

* chore: Fixes for scalafix
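The scalafix-driven edits in the Scala hunks below all share one shape: a lambda parameter that is bound but never read is replaced with the `_` placeholder so the unused-symbol lint passes. A standalone sketch of that before/after — the names here are illustrative, not taken from the diff:

```scala
// Illustrative only: the before/after shape of the scalafix cleanup.
object UnusedParamExample {
  // Before: `row` is bound but never read, which the lint flags.
  val before: Int => Int = (row: Int) => 42

  // After: `_` states explicitly that the argument is ignored.
  val after: Int => Int = (_: Int) => 42

  def main(args: Array[String]): Unit = {
    assert(before(7) == after(7)) // behavior is unchanged
    println(after(7))             // prints 42
  }
}
```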

---------

Co-authored-by: Steve Vaughan <[email protected]>
snmvaughan and Steve Vaughan authored May 8, 2024
1 parent 9df0236 commit ee86b0e
Showing 4 changed files with 42 additions and 32 deletions.
29 changes: 19 additions & 10 deletions .github/workflows/pr_build.yml
@@ -47,13 +47,14 @@ jobs:
         java_version: [8, 11, 17]
         test-target: [rust, java]
         spark-version: ['3.4']
+        scala-version: ['2.12', '2.13']
         is_push_event:
           - ${{ github.event_name == 'push' }}
         exclude: # exclude java 11 for pull_request event
           - java_version: 11
             is_push_event: false
       fail-fast: false
-    name: ${{ matrix.os }}/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}/${{ matrix.test-target }}
+    name: ${{ matrix.os }}/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}-scala-${{matrix.scala-version}}/${{ matrix.test-target }}
     runs-on: ${{ matrix.os }}
     container:
       image: amd64/rust
@@ -71,7 +72,7 @@ jobs:
         name: Java test steps
         uses: ./.github/actions/java-test
         with:
-          maven_opts: -Pspark-${{ matrix.spark-version }}
+          maven_opts: -Pspark-${{ matrix.spark-version }},scala-${{ matrix.scala-version }}
           # upload test reports only for java 17
           upload-test-reports: ${{ matrix.java_version == '17' }}

@@ -82,13 +83,16 @@ jobs:
         java_version: [8, 11, 17]
         test-target: [java]
         spark-version: ['3.2', '3.3']
+        scala-version: ['2.12', '2.13']
         exclude:
           - java_version: 17
             spark-version: '3.2'
           - java_version: 11
             spark-version: '3.2'
+          - spark-version: '3.2'
+            scala-version: '2.13'
       fail-fast: false
-    name: ${{ matrix.os }}/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}/${{ matrix.test-target }}
+    name: ${{ matrix.os }}/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}-scala-${{matrix.scala-version}}/${{ matrix.test-target }}
     runs-on: ${{ matrix.os }}
     container:
       image: amd64/rust
@@ -102,7 +106,7 @@ jobs:
       - name: Java test steps
         uses: ./.github/actions/java-test
         with:
-          maven_opts: -Pspark-${{ matrix.spark-version }}
+          maven_opts: -Pspark-${{ matrix.spark-version }},scala-${{ matrix.scala-version }}
 
   macos-test:
     strategy:
@@ -111,9 +115,10 @@ jobs:
         java_version: [8, 11, 17]
         test-target: [rust, java]
         spark-version: ['3.4']
+        scala-version: ['2.12', '2.13']
       fail-fast: false
     if: github.event_name == 'push'
-    name: ${{ matrix.os }}/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}/${{ matrix.test-target }}
+    name: ${{ matrix.os }}/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}-scala-${{matrix.scala-version}}/${{ matrix.test-target }}
     runs-on: ${{ matrix.os }}
     steps:
       - uses: actions/checkout@v4
@@ -129,21 +134,22 @@ jobs:
         name: Java test steps
         uses: ./.github/actions/java-test
         with:
-          maven_opts: -Pspark-${{ matrix.spark-version }}
+          maven_opts: -Pspark-${{ matrix.spark-version }},scala-${{ matrix.scala-version }}
 
   macos-aarch64-test:
     strategy:
       matrix:
         java_version: [8, 11, 17]
         test-target: [rust, java]
         spark-version: ['3.4']
+        scala-version: ['2.12', '2.13']
         is_push_event:
           - ${{ github.event_name == 'push' }}
         exclude: # exclude java 11 for pull_request event
           - java_version: 11
             is_push_event: false
       fail-fast: false
-    name: macos-14(Silicon)/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}/${{ matrix.test-target }}
+    name: macos-14(Silicon)/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}-scala-${{matrix.scala-version}}/${{ matrix.test-target }}
     runs-on: macos-14
     steps:
       - uses: actions/checkout@v4
@@ -161,21 +167,24 @@ jobs:
         name: Java test steps
         uses: ./.github/actions/java-test
         with:
-          maven_opts: -Pspark-${{ matrix.spark-version }}
+          maven_opts: -Pspark-${{ matrix.spark-version }},scala-${{ matrix.scala-version }}
 
   macos-aarch64-test-with-old-spark:
     strategy:
       matrix:
         java_version: [8, 17]
         test-target: [java]
         spark-version: ['3.2', '3.3']
+        scala-version: ['2.12', '2.13']
         exclude:
           - java_version: 17
             spark-version: '3.2'
           - java_version: 8
             spark-version: '3.3'
+          - spark-version: '3.2'
+            scala-version: '2.13'
       fail-fast: false
-    name: macos-14(Silicon)/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}/${{ matrix.test-target }}
+    name: macos-14(Silicon)/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}-scala-${{matrix.scala-version}}/${{ matrix.test-target }}
     runs-on: macos-14
     steps:
       - uses: actions/checkout@v4
@@ -190,5 +199,5 @@ jobs:
         name: Java test steps
         uses: ./.github/actions/java-test
         with:
-          maven_opts: -Pspark-${{ matrix.spark-version }}
+          maven_opts: -Pspark-${{ matrix.spark-version }},scala-${{ matrix.scala-version }}
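A note on the repeated `maven_opts` change above: Maven's `-P` flag accepts a comma-separated list of profile IDs, so `-Pspark-${{ matrix.spark-version }},scala-${{ matrix.scala-version }}` activates both the Spark and the Scala build profiles in a single invocation — one job per matrix cell, with no extra Maven runs.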

@@ -334,7 +334,7 @@ object CometShuffleExchangeExec extends ShimCometShuffleExchangeExec {
       // seed by hashing will help. Refer to SPARK-21782 for more details.
       val partitionId = TaskContext.get().partitionId()
       var position = new XORShiftRandom(partitionId).nextInt(numPartitions)
-      (row: InternalRow) => {
+      (_: InternalRow) => {
         // The HashPartitioner will handle the `mod` by the number of partitions
         position += 1
         position
41 changes: 21 additions & 20 deletions spark/src/test/scala/org/apache/comet/CometCastSuite.scala
@@ -867,26 +867,27 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
     }
   }
 
-  private def castFallbackTestTimezone(
-      input: DataFrame,
-      toType: DataType,
-      expectedMessage: String): Unit = {
-    withTempPath { dir =>
-      val data = roundtripParquet(input, dir).coalesce(1)
-      data.createOrReplaceTempView("t")
-
-      withSQLConf(
-        (SQLConf.ANSI_ENABLED.key, "false"),
-        (CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key, "true"),
-        (SQLConf.SESSION_LOCAL_TIMEZONE.key, "America/Los_Angeles")) {
-        val df = data.withColumn("converted", col("a").cast(toType))
-        df.collect()
-        val str =
-          new ExtendedExplainInfo().generateExtendedInfo(df.queryExecution.executedPlan)
-        assert(str.contains(expectedMessage))
-      }
-    }
-  }
+  // TODO Commented out to work around scalafix since this is currently unused.
+  // private def castFallbackTestTimezone(
+  //     input: DataFrame,
+  //     toType: DataType,
+  //     expectedMessage: String): Unit = {
+  //   withTempPath { dir =>
+  //     val data = roundtripParquet(input, dir).coalesce(1)
+  //     data.createOrReplaceTempView("t")
+  //
+  //     withSQLConf(
+  //       (SQLConf.ANSI_ENABLED.key, "false"),
+  //       (CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key, "true"),
+  //       (SQLConf.SESSION_LOCAL_TIMEZONE.key, "America/Los_Angeles")) {
+  //       val df = data.withColumn("converted", col("a").cast(toType))
+  //       df.collect()
+  //       val str =
+  //         new ExtendedExplainInfo().generateExtendedInfo(df.queryExecution.executedPlan)
+  //       assert(str.contains(expectedMessage))
+  //     }
+  //   }
+  // }
 
   private def castTimestampTest(input: DataFrame, toType: DataType) = {
     withTempPath { dir =>
@@ -587,7 +587,7 @@ abstract class CometColumnarShuffleSuite extends CometTestBase with AdaptiveSparkPlanHelper {
   }
 
   test("columnar shuffle on nested array") {
-    Seq("false", "true").foreach { execEnabled =>
+    Seq("false", "true").foreach { _ =>
       Seq(10, 201).foreach { numPartitions =>
         Seq("1.0", "10.0").foreach { ratio =>
           withSQLConf(CometConf.COMET_SHUFFLE_PREFER_DICTIONARY_RATIO.key -> ratio) {
