Merge branch 'main' into fix_aqe_plan
viirya authored Mar 13, 2024
2 parents 8b7946c + d069713 commit 5a7f7fe
Showing 13 changed files with 1,471 additions and 10 deletions.
2 changes: 1 addition & 1 deletion .github/actions/java-test/action.yaml
@@ -49,7 +49,7 @@ runs:
    - name: Run Maven compile
      shell: bash
      run: |
-        ./mvnw -B compile test-compile scalafix:scalafix -Psemanticdb ${{ inputs.maven_opts }}
+        ./mvnw -B compile test-compile scalafix:scalafix -Dscalafix.mode=CHECK -Psemanticdb ${{ inputs.maven_opts }}
    - name: Run tests
      shell: bash
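
Note: `-Dscalafix.mode=CHECK` switches Scalafix from rewriting sources to verifying them, so CI now fails on style violations instead of silently fixing them. Both invocations below are taken from this diff and the project Makefile:

    # CI: verify only -- the build fails if any Scalafix rule would rewrite a file
    ./mvnw -B compile test-compile scalafix:scalafix -Dscalafix.mode=CHECK -Psemanticdb

    # Local development: omit CHECK so Scalafix applies the rewrites in place
    ./mvnw compile test-compile scalafix:scalafix -Psemanticdb
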
64 changes: 64 additions & 0 deletions .github/actions/setup-spark-builder/action.yaml
@@ -0,0 +1,64 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

name: Setup Spark Builder
description: 'Setup Apache Spark to run SQL tests'
inputs:
  spark-short-version:
    description: 'The Apache Spark short version (e.g., 3.4) to build'
    required: true
    default: '3.4'
  spark-version:
    description: 'The Apache Spark version (e.g., 3.4.2) to build'
    required: true
    default: '3.4.2'
  comet-version:
    description: 'The Comet version to use for Spark'
    required: true
    default: '0.1.0-SNAPSHOT'
runs:
  using: "composite"
  steps:
    - name: Clone Spark repo
      uses: actions/checkout@v4
      with:
        repository: apache/spark
        path: apache-spark
        ref: v${{inputs.spark-version}}
        fetch-depth: 1

    - name: Setup Spark for Comet
      shell: bash
      run: |
        cd apache-spark
        git apply ../dev/diffs/${{inputs.spark-version}}.diff
        ../mvnw -nsu -q versions:set-property -Dproperty=comet.version -DnewVersion=${{inputs.comet-version}} -DgenerateBackupPoms=false
    - name: Cache Maven dependencies
      uses: actions/cache@v4
      with:
        path: |
          ~/.m2/repository
          /root/.m2/repository
        key: ${{ runner.os }}-spark-sql-${{ hashFiles('spark/**/pom.xml', 'common/**/pom.xml') }}
        restore-keys: |
          ${{ runner.os }}-spark-sql-
    - name: Build Comet
      shell: bash
      run: |
        PROFILES="-Pspark-${{inputs.spark-short-version}}" make release
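
For reference, the composite action above boils down to a short shell sequence that can be reproduced locally (a sketch assuming a Comet checkout as the working directory and Spark 3.4.2 as the target version):

    # Clone the matching Spark release tag alongside the Comet sources
    git clone --depth 1 --branch v3.4.2 https://github.com/apache/spark.git apache-spark

    # Apply Comet's per-version patch and point Spark at the local Comet build
    cd apache-spark
    git apply ../dev/diffs/3.4.2.diff
    ../mvnw -nsu -q versions:set-property -Dproperty=comet.version \
        -DnewVersion=0.1.0-SNAPSHOT -DgenerateBackupPoms=false

    # Build and package Comet with the matching Spark profile
    cd .. && PROFILES="-Pspark-3.4" make release
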
79 changes: 79 additions & 0 deletions .github/workflows/spark_sql_test.yml
@@ -0,0 +1,79 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

name: Spark SQL Tests

concurrency:
  group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true

on:
  push:
    paths-ignore:
      - "doc/**"
      - "**.md"
  pull_request:
    paths-ignore:
      - "doc/**"
      - "**.md"
  # manual trigger
  # https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow
  workflow_dispatch:

env:
  RUST_VERSION: nightly

jobs:
  spark-sql-catalyst:
    strategy:
      matrix:
        os: [ubuntu-latest]
        java-version: [11]
        spark-version: [{short: '3.4', full: '3.4.2'}]
        module:
          - {name: "catalyst", args1: "catalyst/test", args2: ""}
          - {name: "sql/core-1", args1: "", args2: "sql/testOnly * -- -l org.apache.spark.tags.ExtendedSQLTest -l org.apache.spark.tags.SlowSQLTest"}
          - {name: "sql/core-2", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.ExtendedSQLTest"}
          - {name: "sql/core-3", args1: "", args2: "sql/testOnly * -- -n org.apache.spark.tags.SlowSQLTest"}
          - {name: "sql/hive-1", args1: "", args2: "hive/testOnly * -- -l org.apache.spark.tags.ExtendedHiveTest -l org.apache.spark.tags.SlowHiveTest"}
          - {name: "sql/hive-2", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.ExtendedHiveTest"}
          - {name: "sql/hive-3", args1: "", args2: "hive/testOnly * -- -n org.apache.spark.tags.SlowHiveTest"}
      fail-fast: false
    name: spark-sql-${{ matrix.module.name }}/${{ matrix.os }}/spark-${{ matrix.spark-version.full }}/java-${{ matrix.java-version }}
    runs-on: ${{ matrix.os }}
    container:
      image: amd64/rust
    steps:
      - uses: actions/checkout@v4
      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{env.RUST_VERSION}}
          jdk-version: ${{ matrix.java-version }}
      - name: Setup Spark
        uses: ./.github/actions/setup-spark-builder
        with:
          spark-version: ${{ matrix.spark-version.full }}
          spark-short-version: ${{ matrix.spark-version.short }}
          comet-version: '0.1.0-SNAPSHOT' # TODO: get this from pom.xml
      - name: Run Spark tests
        run: |
          cd apache-spark
          ENABLE_COMET=true build/sbt ${{ matrix.module.args1 }} "${{ matrix.module.args2 }}"
        env:
          LC_ALL: "C.UTF-8"

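Each matrix entry expands to a single sbt invocation inside the patched Spark tree. For example, the sql/core-2 cell is equivalent to running (assuming the Spark setup above has completed):

    cd apache-spark
    # args1 is empty for this cell; args2 selects only tests tagged ExtendedSQLTest
    LC_ALL=C.UTF-8 ENABLE_COMET=true build/sbt \
        "sql/testOnly * -- -n org.apache.spark.tags.ExtendedSQLTest"
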
4 changes: 4 additions & 0 deletions DEVELOPMENT.md
@@ -84,3 +84,7 @@ in the respective source code, e.g., `CometTPCHQueryBenchmark`.
## Debugging
Comet is a multi-language project with native code written in Rust and JVM code written in Java and Scala.
It is possible to debug both native and JVM code concurrently as described in the [DEBUGGING guide](DEBUGGING.md).

## Submitting a Pull Request
Comet uses `cargo fmt`, [Scalafix](https://github.com/scalacenter/scalafix) and [Spotless](https://github.com/diffplug/spotless/tree/main/plugin-maven) to
format the code automatically. Before submitting a pull request, simply run `make format`.
1 change: 1 addition & 0 deletions Makefile
@@ -38,6 +38,7 @@ clean:
bench:
	cd core && RUSTFLAGS="-Ctarget-cpu=native" cargo bench $(filter-out $@,$(MAKECMDGOALS))
format:
	cd core && cargo fmt
	./mvnw compile test-compile scalafix:scalafix -Psemanticdb $(PROFILES)
	./mvnw spotless:apply $(PROFILES)

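The `format` target honors the `PROFILES` make variable, which is forwarded to both Maven invocations, so formatting can be run against a specific Spark profile:

    # Format Rust, Scala, and Java sources against the Spark 3.4 profile
    make format PROFILES="-Pspark-3.4"
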
2 changes: 2 additions & 0 deletions bin/comet-spark-shell
@@ -81,4 +81,6 @@ RUST_BACKTRACE=1 $SPARK_HOME/bin/spark-shell \
    --conf spark.comet.enabled=true \
    --conf spark.comet.exec.enabled=true \
    --conf spark.comet.exec.all.enabled=true \
+   --conf spark.comet.exec.shuffle.enabled=true \
+   --conf spark.shuffle.manager=org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager \
    $@
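
The two added flags wire in Comet's native shuffle: `spark.comet.exec.shuffle.enabled` turns the feature on, and the shuffle manager must be swapped at startup because `spark.shuffle.manager` is a static setting. The same flags carry over to batch submission; a sketch in which `$COMET_JAR` and `my_job.py` are placeholders:

    # Hypothetical batch job reusing the Comet shuffle configuration above
    $SPARK_HOME/bin/spark-submit \
        --jars "$COMET_JAR" \
        --conf spark.comet.enabled=true \
        --conf spark.comet.exec.enabled=true \
        --conf spark.comet.exec.all.enabled=true \
        --conf spark.comet.exec.shuffle.enabled=true \
        --conf spark.shuffle.manager=org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager \
        my_job.py
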
ReadOptions.java
@@ -53,10 +53,10 @@ public class ReadOptions {
  // to reduce the skew. This will result in a slightly larger number of connections
  // opened to the file system but may give improved performance.
  // The option is off by default.
-  public static final String BOSON_IO_ADJUST_READRANGE_SKEW =
-      "boson.parquet.read.io.adjust.readRange.skew";
+  public static final String COMET_IO_ADJUST_READRANGE_SKEW =
+      "comet.parquet.read.io.adjust.readRange.skew";

-  private static final boolean BOSON_IO_ADJUST_READRANGE_SKEW_DEFAULT = false;
+  private static final boolean COMET_IO_ADJUST_READRANGE_SKEW_DEFAULT = false;

  // Max number of concurrent tasks we expect. Used to autoconfigure S3 client connections
  public static final int S3A_MAX_EXPECTED_PARALLELISM = 32;
@@ -180,7 +180,7 @@ public Builder(Configuration conf) {
    this.ioMergeRangesDelta =
        conf.getInt(COMET_IO_MERGE_RANGES_DELTA, COMET_IO_MERGE_RANGES_DELTA_DEFAULT);
    this.adjustReadRangeSkew =
-        conf.getBoolean(BOSON_IO_ADJUST_READRANGE_SKEW, BOSON_IO_ADJUST_READRANGE_SKEW_DEFAULT);
+        conf.getBoolean(COMET_IO_ADJUST_READRANGE_SKEW, COMET_IO_ADJUST_READRANGE_SKEW_DEFAULT);
    // override some S3 defaults
    setS3Config();
  }
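
Since `ReadOptions.Builder` reads these keys from a Hadoop `Configuration`, a Spark user could enable the off-by-default skew adjustment via Spark's standard `spark.hadoop.*` passthrough; a sketch under the assumption that Comet's Parquet reader picks the option up from the session's Hadoop configuration:

    # Assumption: spark.hadoop.* entries land in the Hadoop Configuration
    # that ReadOptions.Builder(conf) consults via conf.getBoolean(...)
    $SPARK_HOME/bin/spark-shell \
        --conf spark.comet.enabled=true \
        --conf spark.hadoop.comet.parquet.read.io.adjust.readRange.skew=true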