build: Add spark-4.0 profile and shims #407
@@ -76,6 +76,50 @@ jobs:
          # upload test reports only for java 17
          upload-test-reports: ${{ matrix.java_version == '17' }}

  linux-test-with-spark4_0:
    strategy:
      matrix:
        os: [ubuntu-latest]
        java_version: [17]
        test-target: [rust, java]
        spark-version: ['4.0']
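        # `is_push_event` (below) exposes the workflow trigger as a matrix value,
        # so `exclude` rules can drop matrix entries on pull_request runs.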
        is_push_event:
          - ${{ github.event_name == 'push' }}
      fail-fast: false
    name: ${{ matrix.os }}/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}/${{ matrix.test-target }}
    runs-on: ${{ matrix.os }}
    container:
      image: amd64/rust
    steps:
      - uses: actions/checkout@v4
      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{env.RUST_VERSION}}
          jdk-version: ${{ matrix.java_version }}
      - if: matrix.test-target == 'rust'
        name: Rust test steps
        uses: ./.github/actions/rust-test
      - if: matrix.test-target == 'java'
        name: Clone Spark
        uses: actions/checkout@v4
        with:
          repository: "apache/spark"
          path: "apache-spark"
      - if: matrix.test-target == 'java'
        name: Install Spark
        shell: bash
        working-directory: ./apache-spark
        run: build/mvn install -Phive -Phadoop-cloud -DskipTests
[Review comment] This is only needed for Spark 4.0? I don't see that we install it for other Spark versions.
[Reply] Yes, this is only needed for Spark 4.0 because there is no …
      - if: matrix.test-target == 'java'
        name: Java test steps
        uses: ./.github/actions/java-test
        with:
          # TODO: remove -DskipTests after fixing tests
          maven_opts: "-Pspark-${{ matrix.spark-version }} -DskipTests"
          # TODO: upload test reports after enabling tests
          upload-test-reports: false

  linux-test-with-old-spark:
    strategy:
      matrix:
@@ -169,6 +213,93 @@ jobs:
        with:
          maven_opts: -Pspark-${{ matrix.spark-version }},scala-${{ matrix.scala-version }}

  macos-test-with-spark4_0:
    strategy:
      matrix:
        os: [macos-13]
        java_version: [17]
        test-target: [rust, java]
        spark-version: ['4.0']
      fail-fast: false
    if: github.event_name == 'push'
    name: ${{ matrix.os }}/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}/${{ matrix.test-target }}
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-macos-builder
        with:
          rust-version: ${{env.RUST_VERSION}}
          jdk-version: ${{ matrix.java_version }}
      - if: matrix.test-target == 'rust'
        name: Rust test steps
        uses: ./.github/actions/rust-test
      - if: matrix.test-target == 'java'
        name: Clone Spark
        uses: actions/checkout@v4
        with:
          repository: "apache/spark"
          path: "apache-spark"
      - if: matrix.test-target == 'java'
        name: Install Spark
        shell: bash
        working-directory: ./apache-spark
        run: build/mvn install -Phive -Phadoop-cloud -DskipTests
      - if: matrix.test-target == 'java'
        name: Java test steps
        uses: ./.github/actions/java-test
        with:
          # TODO: remove -DskipTests after fixing tests
          maven_opts: "-Pspark-${{ matrix.spark-version }} -DskipTests"
          # TODO: upload test reports after enabling tests
          upload-test-reports: false

  macos-aarch64-test-with-spark4_0:
    strategy:
      matrix:
        java_version: [17]
        test-target: [rust, java]
        spark-version: ['4.0']
        is_push_event:
          - ${{ github.event_name == 'push' }}
        exclude: # exclude java 11 for pull_request event
          - java_version: 11
            is_push_event: false
      fail-fast: false
    name: macos-14(Silicon)/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}/${{ matrix.test-target }}
    runs-on: macos-14
    steps:
      - uses: actions/checkout@v4
      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-macos-builder
        with:
          rust-version: ${{env.RUST_VERSION}}
          jdk-version: ${{ matrix.java_version }}
          jdk-architecture: aarch64
          protoc-architecture: aarch_64
      - if: matrix.test-target == 'rust'
        name: Rust test steps
        uses: ./.github/actions/rust-test
      - if: matrix.test-target == 'java'
        name: Clone Spark
        uses: actions/checkout@v4
        with:
          repository: "apache/spark"
          path: "apache-spark"
      - if: matrix.test-target == 'java'
        name: Install Spark
        shell: bash
        working-directory: ./apache-spark
        run: build/mvn install -Phive -Phadoop-cloud -DskipTests
      - if: matrix.test-target == 'java'
        name: Java test steps
        uses: ./.github/actions/java-test
        with:
          # TODO: remove -DskipTests after fixing tests
          maven_opts: "-Pspark-${{ matrix.spark-version }} -DskipTests"
          # TODO: upload test reports after enabling tests
          upload-test-reports: false

  macos-aarch64-test-with-old-spark:
    strategy:
      matrix:
@@ -0,0 +1,81 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.spark.sql.comet.shims

import org.apache.spark.sql.types._

trait ShimCometParquetUtils {
  // The following is copied from QueryExecutionErrors.
  // TODO: remove after dropping Spark 3.2.0 support and directly use
  // QueryExecutionErrors.foundDuplicateFieldInFieldIdLookupModeError
  def foundDuplicateFieldInFieldIdLookupModeError(
      requiredId: Int,
      matchedFields: String): Throwable = {
    new RuntimeException(s"""
         |Found duplicate field(s) "$requiredId": $matchedFields
         |in id mapping mode
       """.stripMargin.replaceAll("\n", " "))
  }
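  // For illustration: foundDuplicateFieldInFieldIdLookupModeError(1, "a, b")
  // produces a RuntimeException whose message (modulo surrounding whitespace) is:
  //   Found duplicate field(s) "1": a, b in id mapping mode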
  // The following is copied from org.apache.spark.sql.execution.datasources.parquet.ParquetUtils.
  // TODO: remove after dropping Spark 3.2.0 support and directly use ParquetUtils

  /**
   * A StructField metadata key used to set the field id of a column in the Parquet schema.
   */
  val FIELD_ID_METADATA_KEY = "parquet.field.id"

  /**
   * Returns whether any field in the schema, inner or leaf, has the parquet field ID metadata.
   */
  def hasFieldIds(schema: StructType): Boolean = {
    def recursiveCheck(schema: DataType): Boolean = {
      schema match {
        case st: StructType =>
          st.exists(field => hasFieldId(field) || recursiveCheck(field.dataType))

        case at: ArrayType => recursiveCheck(at.elementType)

        case mt: MapType => recursiveCheck(mt.keyType) || recursiveCheck(mt.valueType)

        case _ =>
          // No need to really check primitive types, just to terminate the recursion
          false
      }
    }
    if (schema.isEmpty) false else recursiveCheck(schema)
  }

  def hasFieldId(field: StructField): Boolean =
    field.metadata.contains(FIELD_ID_METADATA_KEY)

  def getFieldId(field: StructField): Int = {
    require(
      hasFieldId(field),
      s"The key `$FIELD_ID_METADATA_KEY` doesn't exist in the metadata of " + field)
    try {
      Math.toIntExact(field.metadata.getLong(FIELD_ID_METADATA_KEY))
    } catch {
      case _: ArithmeticException | _: ClassCastException =>
        throw new IllegalArgumentException(
          s"The key `$FIELD_ID_METADATA_KEY` must be a 32-bit integer")
    }
  }
}
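A minimal usage sketch of the trait above (`ParquetUtilsDemo` is a hypothetical object added here for illustration, not part of the PR):

import org.apache.spark.sql.comet.shims.ShimCometParquetUtils
import org.apache.spark.sql.types._

// Hypothetical demo object mixing in the shim trait.
object ParquetUtilsDemo extends ShimCometParquetUtils {
  def main(args: Array[String]): Unit = {
    // Attach a parquet field ID to column "a" via StructField metadata.
    val withId = new MetadataBuilder().putLong("parquet.field.id", 3L).build()
    val schema = StructType(Seq(
      StructField("a", IntegerType, nullable = true, metadata = withId),
      StructField("b", StringType)))

    println(hasFieldIds(schema))     // true: the recursion finds the id on "a"
    println(hasFieldId(schema("b"))) // false: "b" carries no field-id metadata
    println(getFieldId(schema("a"))) // 3
  }
}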
@@ -0,0 +1,37 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.comet.shims

import org.apache.spark.paths.SparkPath
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.execution.datasources.PartitionedFile

object ShimBatchReader {
  def newPartitionedFile(partitionValues: InternalRow, file: String): PartitionedFile =
    PartitionedFile(
      partitionValues,
      SparkPath.fromUrlString(file),
      -1, // -1 means we read the entire file
      -1,
      Array.empty[String],
      0,
      0
    )
}
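A brief usage sketch (illustrative only; the file path is made up): the shim hides PartitionedFile constructor differences across Spark versions, so a caller supplies only partition values and a URL-style path, and the -1 start/length make the reader scan the whole file.

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.comet.shims.ShimBatchReader

object BatchReaderDemo {
  // Build a PartitionedFile covering an entire (hypothetical) Parquet file,
  // with no partition values and no preferred block locations.
  val file = ShimBatchReader.newPartitionedFile(
    InternalRow.empty,
    "file:///tmp/part-00000.parquet")
}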
@@ -0,0 +1,33 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.comet.shims

import org.apache.spark.sql.execution.datasources.FileFormat
import org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat

object ShimFileFormat {
  val ROW_INDEX = ParquetFileFormat.ROW_INDEX

  // A name for a temporary column that holds row indexes computed by the file format reader
  // until they can be placed in the _metadata struct.
  val ROW_INDEX_TEMPORARY_COLUMN_NAME = ParquetFileFormat.ROW_INDEX_TEMPORARY_COLUMN_NAME

  val OPTION_RETURNING_BATCH = FileFormat.OPTION_RETURNING_BATCH
}
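A short sketch of how such constants are typically consumed (an assumption for illustration; the PR itself only defines the aliases): they are plain String keys, so they can be passed in a reader's options map, for example to request columnar batches.

import org.apache.comet.shims.ShimFileFormat

object FileFormatOptionsDemo {
  // Request vectorized (batch) output from a file reader via the shimmed key.
  val readerOptions: Map[String, String] =
    Map(ShimFileFormat.OPTION_RETURNING_BATCH -> "true")
}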
[Review comment] Is it necessary to run the Rust tests for Spark 4.0 separately? There should be no difference from 3.4 or 3.3/3.2; we also don't run the Rust tests for Spark 3.3/3.2, only for Spark 3.4.
[Reply] Removed.