diff --git a/.asf.yaml b/.asf.yaml index ef141f3bb3fa..d126c562838f 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -49,6 +49,7 @@ github: protected_branches: master: {} + release-2.53.0: {} release-2.52.0: {} release-2.51.0: {} release-2.50.0: {} diff --git a/.github/REVIEWERS.yml b/.github/REVIEWERS.yml index c2a57ac8feef..b878d2f81cc8 100644 --- a/.github/REVIEWERS.yml +++ b/.github/REVIEWERS.yml @@ -60,6 +60,10 @@ labels: - igorbernstein2 - mutianf exclusionList: [] + - name: healthcare + reviewers: + - svetakvsundhar + exclusionList: [] - name: Build reviewers: - damccorm diff --git a/.github/actions/gradle-command-action b/.github/actions/gradle-command-action deleted file mode 160000 index 90ccf054e6b9..000000000000 --- a/.github/actions/gradle-command-action +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 90ccf054e6b9905f30f98c938bce4c6acd323b6b diff --git a/.github/actions/gradle-command-self-hosted-action/action.yml b/.github/actions/gradle-command-self-hosted-action/action.yml index 906b35169d9d..e2fd768220a3 100644 --- a/.github/actions/gradle-command-self-hosted-action/action.yml +++ b/.github/actions/gradle-command-self-hosted-action/action.yml @@ -35,9 +35,9 @@ runs: - name: Run Gradle Command shell: bash run: | - # Removing settings.xml is a workaround to avoid a decryption issue - # of Beam's gradle-command-action plugin and github's provided - # maven settings.xml file + # This step is a workaround to avoid a decryption issue between Beam's + # net.linguica.gradle.maven.settings plugin and GitHub's provided Maven + # settings.xml file if [ -f ~/.m2/settings.xml ]; then rm ~/.m2/settings.xml fi diff --git a/.github/actions/setup-action/action.yml b/.github/actions/setup-action/action.yml index 743e89a931da..4c4bb2753194 100644 --- a/.github/actions/setup-action/action.yml +++ b/.github/actions/setup-action/action.yml @@ -36,8 +36,10 @@ runs: shell: bash if: github.event_name == 'issue_comment' && github.event.comment.body != inputs.comment_phrase run: | - echo "The comment ${{ github.event.comment.body }} does not match the phrase for this instance: ${{ inputs.comment_phrase }}. Exiting." + echo "The comment $PHRASE does not match the phrase for this instance: ${{ inputs.comment_phrase }}. Exiting." exit 1 + env: + PHRASE: "${{ github.event.comment.body }}" - name: Check out repository code if pull request commit shell: bash if: ${{ github.event_name == 'pull_request_target' }} @@ -69,4 +71,4 @@ runs: - name: expose gcloud path shell: bash run: | - echo KUBELET_GCLOUD_CONFIG_PATH=/var/lib/kubelet/pods/$POD_UID/volumes/kubernetes.io~empty-dir/gcloud >> $GITHUB_ENV \ No newline at end of file + echo KUBELET_GCLOUD_CONFIG_PATH=/var/lib/kubelet/pods/$POD_UID/volumes/kubernetes.io~empty-dir/gcloud >> $GITHUB_ENV
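The `setup-action` hunk above swaps a direct `${{ github.event.comment.body }}` interpolation for an `env:` variable, so untrusted comment text is never expanded into the script source (a shell-injection guard). A minimal sketch of the pattern, with a hypothetical step name and message:

```yaml
# Sketch only: the step name and echoed text are illustrative.
# Untrusted event data reaches the script as $PHRASE via env,
# so the shell treats it as data rather than code.
- name: Echo comment safely
  shell: bash
  env:
    PHRASE: "${{ github.event.comment.body }}"
  run: |
    echo "Got comment: $PHRASE"
```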
diff --git a/.github/autolabeler.yml b/.github/autolabeler.yml index 57c8f65c6ac1..a4ce41fce073 100644 --- a/.github/autolabeler.yml +++ b/.github/autolabeler.yml @@ -57,6 +57,7 @@ io: ["sdks/go/pkg/beam/io/**/*", "sdks/java/io/**/*", "sdks/python/apache_beam/ "hadoop": ["sdks/java/io/hadoop-common/**/*", "sdks/java/io/hadoop-file-system/**/*", "sdks/java/io/hadoop-format/**/*"] "hbase": ["sdks/java/io/hbase/**/*"] "hcatalog": ["sdks/java/io/hcatalog/**/*"] +"healthcare": ["sdks/python/apache_beam/io/gcp/healthcare/**/*", "sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/**/*"] "jdbc": ["sdks/java/io/jdbc/**/*"] "jms": ["sdks/java/io/jms/**/*"] "kafka": ["sdks/java/io/kafka/**/*"] diff --git a/.github/build.gradle b/.github/build.gradle index acfe7f3686a9..de2670545625 100644 --- a/.github/build.gradle +++ b/.github/build.gradle @@ -28,6 +28,7 @@ buildscript { /** check that yml are valid */ task check { doLast { + List errors = [] fileTree("${project.projectDir}/workflows").matching { include "*.yml" include "*.yaml" @@ -36,23 +37,41 @@ task check { // attempt load yml to make sure its valid def workflow = new org.yaml.snakeyaml.Yaml().load(it.newInputStream()) - // additional guards for running all tests functionality - // TODO(yathu) expand this also to post commits prior teardown Jenkins postcommits - if ( fname.startsWith("beam_PreCommit")) { + // additional guards to ensure tests are configured the same way + if ( fname.startsWith("beam_PreCommit") || fname.startsWith("beam_PostCommit") ) { List paths try { paths = workflow.getAt(true).pull_request_target.paths as List } catch (Exception e) { - throw new GradleException("Fail to get the trigger path for ${fname}. " + - "Make sure precommit has a pull_request_target trigger.", e) + errors.add("Failed to get the trigger path for ${fname}. " + + "Make sure it has a pull_request_target trigger.") + return } - // precommit should triggered by this specific file + + // precommit and postcommit should be triggered by this specific file; + // this ensures no tests are missed during release branch verification if (paths != null && !paths.contains('release/trigger_all_tests.json')) { - throw new GradleException("Error validating ${fname}: " + - "Please add 'release/trigger_all_tests.json' to the trigger path for release verification run properly") + errors.add("Error validating ${fname}: " + + "Please add 'release/trigger_all_tests.json' to the trigger path") + return + } + + // postcommit should be triggered by a specific file so that there is a way to exercise postcommits from an open PR + // TODO(https://github.com/apache/beam/issues/28909)
+ // remove the file match trigger once a better trigger (e.g. a comment trigger) is implemented + if (fname.startsWith("beam_PostCommit")) { + String triggerFile = '.github/trigger_files/' + fname.take(fname.lastIndexOf('.')) + '.json' + if (paths != null && !paths.contains(triggerFile)) { + errors.add("Error validating ${fname}: " + + "Please add ${triggerFile} to the trigger path") + return + } } } } + if (!errors.isEmpty()) { + throw new GradleException("Check failed: " + errors.join('\n')) + } } } diff --git a/.github/gh-actions-self-hosted-runners/arc/locals.tf b/.github/gh-actions-self-hosted-runners/arc/locals.tf index a69d069ab865..7820ce3e8aa3 100644 --- a/.github/gh-actions-self-hosted-runners/arc/locals.tf +++ b/.github/gh-actions-self-hosted-runners/arc/locals.tf @@ -20,6 +20,8 @@ locals { arc_values = { + # fix dind issue + "image.dindSidecarRepositoryAndTag" = "docker:24.0.7-dind-alpine3.18" "githubWebhookServer.enabled" = "${var.deploy_webhook}" "authSecret.create" = "true" "authSecret.github_app_id" = data.google_secret_manager_secret_version.github_app_id.secret_data diff --git a/.github/trigger_files/beam_PostCommit_Go.json b/.github/trigger_files/beam_PostCommit_Go.json new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/.github/trigger_files/beam_PostCommit_Go_VR_Flink.json b/.github/trigger_files/beam_PostCommit_Go_VR_Flink.json new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/.github/trigger_files/beam_PostCommit_Go_VR_Samza.json b/.github/trigger_files/beam_PostCommit_Go_VR_Samza.json new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/.github/trigger_files/beam_PostCommit_Go_VR_Spark.json b/.github/trigger_files/beam_PostCommit_Go_VR_Spark.json new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/.github/workflows/README.md b/.github/workflows/README.md index c4f437dedd29..b39dfc9d5924 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -138,6 +138,8 @@ In order to make it easier for non-committers to interact with workflows, workfl 2) Each job should have the rerun action immediately after its checkout step. You can add a step that uses the `setup-action` action in your workflow, which encapsulates the checkout and rerun logic in one place. This should be gated on the comment trigger (example: https://github.com/apache/beam/blob/0ee2dc73ec6f555a5bf1a643dffd37f4927be67e/.github/workflows/beam_PreCommit_Go.yml#L65-L70) 3) Each job should have a descriptive name that includes the comment trigger (example: https://github.com/apache/beam/blob/ba8fc935222aeb070668fbafd588bc58e7a21289/.github/workflows/beam_PreCommit_CommunityMetrics.yml#L48) +**Note:** this approach has proven not to be scalable ([#28909](https://github.com/apache/beam/issues/28909)) and is currently only enabled for PreCommit workflows. For PostCommit jobs, it has been temporarily replaced by a `pull_request_target` trigger scoped to paths under `.github/trigger_files/`, as sketched below. +
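A hedged sketch of the trigger block such a PostCommit workflow would carry (the workflow name, file name, and cron here are assumptions; the two paths mirror what the `.github/build.gradle` check above enforces):

```yaml
# Hypothetical excerpt from .github/workflows/beam_PostCommit_Example.yml
on:
  schedule:
    - cron: '0 */6 * * *'  # assumed schedule; PostCommit jobs run on a cron
  pull_request_target:
    paths:
      # touched during release branch verification to run every test
      - 'release/trigger_all_tests.json'
      # touch this file in a PR to run just this PostCommit job
      - '.github/trigger_files/beam_PostCommit_Example.json'
```

Editing the matching JSON under `.github/trigger_files/` in a PR is then enough to exercise that PostCommit suite before merge.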
# Testing new workflows or workflow updates ## Testing New Workflows @@ -192,6 +194,8 @@ Please note that jobs with matrix need to have matrix element in the comment. Ex ### PreCommit Jobs +PreCommit Jobs run on a schedule and are also triggered on a PR when relevant sources have changed. To manually trigger a certain PreCommit job, comment with its Trigger Phrase (listed below) on the PR. + | Workflow name | Matrix | Trigger Phrase | Cron Status | |:-------------:|:------:|:--------------:|:-----------:| | [ PreCommit Community Metrics ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_CommunityMetrics.yml) | N/A |`Run CommunityMetrics PreCommit`| [![.github/workflows/beam_PreCommit_CommunityMetrics.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_CommunityMetrics.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_CommunityMetrics.yml?query=event%3Aschedule) | @@ -269,207 +273,210 @@ Please note that jobs with matrix need to have matrix element in the comment. Ex ### PostCommit Jobs -| Workflow name | Matrix | Trigger Phrase | Cron Status | +PostCommit Jobs run on a schedule against the master branch and generally are not triggered on a PR. To manually trigger a certain PostCommit job, add or modify its trigger file (listed below) under the `.github/trigger_files/` folder. + +| Workflow name | Matrix | Trigger file | Cron Status | |:-------------:|:------:|:--------------:|:-----------:| -| [ PostCommit BeamMetrics Publish ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_BeamMetrics_Publish.yml) | N/A |`Run Beam Metrics Deployment`| [![.github/workflows/beam_PostCommit_BeamMetrics_Publish.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_BeamMetrics_Publish.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_BeamMetrics_Publish.yml?query=event%3Aschedule) -| [ PostCommit Go ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go.yml) | N/A |`Run Go PostCommit`| [![.github/workflows/beam_PostCommit_Go.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go.yml?query=event%3Aschedule) | -| [ PostCommit Go Dataflow ARM](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_Dataflow_ARM.yml) | N/A |`Run Go PostCommit Dataflow ARM`| [![.github/workflows/beam_PostCommit_Go_Dataflow_ARM.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_Dataflow_ARM.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_Dataflow_ARM.yml?query=event%3Aschedule) | -| [ PostCommit Go VR Flink](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Flink.yml) | N/A |`Run Go Flink ValidatesRunner`| [![.github/workflows/beam_PostCommit_Go_VR_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Flink.yml?query=event%3Aschedule) | -| [ PostCommit Go VR Samza](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Samza.yml) | N/A |`Run Go Samza ValidatesRunner`| [![.github/workflows/beam_PostCommit_Go_VR_Samza.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Samza.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Samza.yml?query=event%3Aschedule) | -| [ PostCommit Go VR Spark](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Spark.yml) | N/A |`Run Go Spark ValidatesRunner`| 
[![.github/workflows/beam_PostCommit_Go_VR_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Spark.yml?query=event%3Aschedule) | -| [ PostCommit Java Avro Versions ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Avro_Versions.yml) | N/A |`Run Java Avro Versions PostCommit`| [![.github/workflows/beam_PostCommit_Java_Avro_Versions.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Avro_Versions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Avro_Versions.yml?query=event%3Aschedule) | -| [ PostCommit Java Dataflow V1 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_DataflowV1.yml) | N/A |`Run PostCommit_Java_Dataflow`| [![.github/workflows/beam_PostCommit_Java_DataflowV1.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_DataflowV1.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_DataflowV1.yml?query=event%3Aschedule) | -| [ PostCommit Java Dataflow V2 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_DataflowV2.yml) | N/A |`Run PostCommit_Java_DataflowV2`| [![.github/workflows/beam_PostCommit_Java_DataflowV2.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_DataflowV2.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_DataflowV2.yml?query=event%3Aschedule) | -| [ PostCommit Java Examples Dataflow ARM ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_ARM.yml) | ['8','11','17','21'] |`Run Java_Examples_Dataflow_ARM PostCommit (matrix_element)`| [![.github/workflows/beam_PostCommit_Java_Examples_Dataflow_ARM.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_ARM.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_ARM.yml?query=event%3Aschedule) | -| [ PostCommit Java Examples Dataflow](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow.yml) | N/A |`Run Java examples on Dataflow`| [![.github/workflows/beam_PostCommit_Java_Examples_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit Java Examples Dataflow Java ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_Java.yml) | ['11','17','21'] |`Run Java examples on Dataflow Java (matrix_element)`| [![.github/workflows/beam_PostCommit_Java_Examples_Dataflow_Java.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_Java.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_Java.yml?query=event%3Aschedule) | -| [ PostCommit Java Examples Dataflow V2 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_V2.yml) | N/A |`Run Java Examples on Dataflow Runner V2`| 
[![.github/workflows/beam_PostCommit_Java_Examples_Dataflow_V2.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_V2.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_V2.yml?query=event%3Aschedule) | -| [ PostCommit Java Examples Dataflow V2 Java ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_V2_Java.yml) | ['11','17','21'] |`Run Java (matrix_element) Examples on Dataflow Runner V2`| [![.github/workflows/beam_PostCommit_Java_Examples_Dataflow_V2_Java.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_V2_Java.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_V2_Java.yml?query=event%3Aschedule) | -| [ PostCommit Java Examples Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Direct.yml) | N/A |`Run Java Examples_Direct`| [![.github/workflows/beam_PostCommit_Java_Examples_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Direct.yml?query=event%3Aschedule) | -| [ PostCommit Java Examples Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Flink.yml) | N/A |`Run Java Examples_Flink`| [![.github/workflows/beam_PostCommit_Java_Examples_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Flink.yml?query=event%3Aschedule) | -| [ PostCommit Java Examples Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Spark.yml) | N/A |`Run Java Examples_Spark`| [![.github/workflows/beam_PostCommit_Java_Examples_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Spark.yml?query=event%3Aschedule) | -| [ PostCommit Java Hadoop Versions ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Hadoop_Versions.yml) | N/A |`Run PostCommit_Java_Hadoop_Versions`| [![.github/workflows/beam_PostCommit_Java_Hadoop_Versions.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Hadoop_Versions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Hadoop_Versions.yml?query=event%3Aschedule) | -| [ PostCommit Java InfluxDbIO Integration Test ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_InfluxDbIO_IT.yml) | N/A |`Run Java InfluxDbIO_IT`| [![.github/workflows/beam_PostCommit_Java_InfluxDbIO_IT.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_InfluxDbIO_IT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_InfluxDbIO_IT.yml?query=event%3Aschedule) -| [ PostCommit Java Jpms Dataflow Java11 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java11.yml) | N/A |`Run Jpms Dataflow Java 11 PostCommit`| 
[![.github/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java11.yml?query=event%3Aschedule) | -| [ PostCommit Java Jpms Dataflow Java17 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java17.yml) | N/A |`Run Jpms Dataflow Java 17 PostCommit`| [![.github/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java17.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java17.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java17.yml?query=event%3Aschedule) | -| [ PostCommit Java Jpms Direct Java11 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java11.yml) | N/A |`Run Jpms Direct Java 11 PostCommit`| [![.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java11.yml?query=event%3Aschedule) | -| [ PostCommit Java Jpms Direct Java17 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java17.yml) | N/A |`Run Jpms Direct Java 17 PostCommit`| [![.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java17.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java17.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java17.yml?query=event%3Aschedule) | -| [ PostCommit Java Jpms Direct Java21 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java21.yml) | N/A |`Run Jpms Direct Java21 PostCommit`| [![.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java21.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java21.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java21.yml?query=event%3Aschedule) | -| [ PostCommit Java Jpms Flink Java11 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Flink_Java11.yml) | N/A |`Run Jpms Flink Java 11 PostCommit`| [![.github/workflows/beam_PostCommit_Java_Jpms_Flink_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Flink_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Flink_Java11.yml?query=event%3Aschedule) | -| [ PostCommit Java Jpms Spark Java11 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Spark_Java11.yml) | N/A |`Run Jpms Spark Java 11 PostCommit`| [![.github/workflows/beam_PostCommit_Java_Jpms_Spark_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Spark_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Spark_Java11.yml?query=event%3Aschedule) | -| [ PostCommit Java Nexmark Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow.yml) | N/A |`Run Dataflow Runner Nexmark Tests`| 
[![.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit Java Nexmark Dataflow V2 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2.yml) | N/A |`Run Dataflow Runner V2 Nexmark Tests`| [![.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2.yml?query=event%3Aschedule) | -| [ PostCommit Java Nexmark Dataflow V2 Java ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.yml) | ['11','17'] |`Run Dataflow Runner V2 Java (matrix) Nexmark Tests`| [![.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.yml?query=event%3Aschedule) | -| [ PostCommit Java Nexmark Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Direct.yml) | N/A |`Run Direct Runner Nexmark Tests`| [![.github/workflows/beam_PostCommit_Java_Nexmark_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Direct.yml?query=event%3Aschedule) | -| [ PostCommit Java Nexmark Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Flink.yml) | N/A |`Run Flink Runner Nexmark Tests`| [![.github/workflows/beam_PostCommit_Java_Nexmark_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Flink.yml?query=event%3Aschedule) | -| [ PostCommit Java Nexmark Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Spark.yml) | N/A |`Run Spark Runner Nexmark Tests`| [![.github/workflows/beam_PostCommit_Java_Nexmark_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Spark.yml?query=event%3Aschedule) | -| [ PostCommit Java PVR Flink Streaming ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Flink_Streaming.yml) | N/A |`Run Java Flink PortableValidatesRunner Streaming`| [![.github/workflows/beam_PostCommit_Java_PVR_Flink_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Flink_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Flink_Streaming.yml?query=event%3Aschedule) | -| [ PostCommit Java PVR Samza ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Samza.yml) | N/A |`Run Java Samza PortableValidatesRunner`| 
[![.github/workflows/beam_PostCommit_Java_PVR_Samza.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Samza.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Samza.yml?query=event%3Aschedule) | -| [ PostCommit Java SingleStoreIO IT ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_SingleStoreIO_IT.yml) | N/A |`Run Java SingleStoreIO_IT`| [![.github/workflows/beam_PostCommit_Java_SingleStoreIO_IT.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_SingleStoreIO_IT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_SingleStoreIO_IT.yml?query=event%3Aschedule) | -| [ PostCommit Java PVR Spark3 Streaming ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Spark3_Streaming.yml) | N/A |`Run Java Spark v3 PortableValidatesRunner Streaming`| [![.github/workflows/beam_PostCommit_Java_PVR_Spark3_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Spark3_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Spark3_Streaming.yml?query=event%3Aschedule) | -| [ PostCommit Java PVR Spark Batch ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Spark_Batch.yml) | N/A |`Run Java Spark PortableValidatesRunner Batch`| [![.github/workflows/beam_PostCommit_Java_PVR_Spark_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Spark_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Spark_Batch.yml?query=event%3Aschedule) | -| [ PostCommit Java Sickbay ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Sickbay.yml) | N/A |`Run Java Sickbay`| [![.github/workflows/beam_PostCommit_Java_Sickbay.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Sickbay.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Sickbay.yml?query=event%3Aschedule) | -| [ PostCommit Java Tpcds Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Dataflow.yml) | N/A |`Run Dataflow Runner Tpcds Tests`| [![.github/workflows/beam_PostCommit_Java_Tpcds_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit Java Tpcds Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Flink.yml) | N/A |`Run Flink Runner Tpcds Tests`| [![.github/workflows/beam_PostCommit_Java_Tpcds_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Flink.yml?query=event%3Aschedule) | -| [ PostCommit Java Tpcds Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Spark.yml) | N/A |`Run Spark Runner Tpcds Tests`| [![.github/workflows/beam_PostCommit_Java_Tpcds_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Spark.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner Dataflow 
JavaVersions ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.yml) | ['11','17'] |`Run Dataflow ValidatesRunner Java (matrix_element)`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.yml) | N/A |`Run Dataflow Streaming ValidatesRunner`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner Dataflow V2 Streaming ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.yml) | N/A |`Run Java Dataflow V2 ValidatesRunner Streaming`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner Dataflow V2 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.yml) | N/A |`Run Java Dataflow V2 ValidatesRunner`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow.yml) | N/A |`Run Dataflow ValidatesRunner`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner Direct JavaVersions ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.yml) | ['11','17'] |`Run Direct ValidatesRunner Java (matrix_element)`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Direct.yml) | N/A |`Run Direct ValidatesRunner`| 
[![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Direct.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner Flink Java11 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Flink_Java11.yml) | N/A |`Run Flink ValidatesRunner Java 11`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Flink_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Flink_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Flink_Java11.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Flink.yml) | N/A |`Run Flink ValidatesRunner`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Flink.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner Samza ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Samza.yml) | N/A |`Run Samza ValidatesRunner`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Samza.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Samza.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Samza.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner Spark Java11 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Spark_Java11.yml) | N/A |`Run Spark ValidatesRunner Java 11`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Spark_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Spark_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Spark_Java11.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Spark.yml) | N/A |`Run Spark ValidatesRunner`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Spark.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner SparkStructuredStreaming ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.yml) | N/A |`Run Spark StructuredStreaming ValidatesRunner`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner Twister2 
](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Twister2.yml) | N/A |`Run Twister2 ValidatesRunner`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Twister2.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Twister2.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Twister2.yml?query=event%3Aschedule) | -| [ PostCommit Java ValidatesRunner ULR ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_ULR.yml) | N/A |`Run ULR Loopback ValidatesRunner`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_ULR.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_ULR.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_ULR.yml?query=event%3Aschedule) | -| [ PostCommit Java ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java.yml) | N/A |`Run Java PostCommit`| [![.github/workflows/beam_PostCommit_Java.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java.yml?query=event%3Aschedule) | -| [ PostCommit Javadoc ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Javadoc.yml) | N/A |`Run Javadoc PostCommit`| [![.github/workflows/beam_PostCommit_Javadoc.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Javadoc.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Javadoc.yml?query=event%3Aschedule) | -| [ PostCommit PortableJar Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Flink.yml) | N/A |`Run PortableJar_Flink PostCommit`| [![.github/workflows/beam_PostCommit_PortableJar_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Flink.yml?query=event%3Aschedule) | -| [ PostCommit PortableJar Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Spark.yml) | N/A |`Run PortableJar_Spark PostCommit`| [![.github/workflows/beam_PostCommit_PortableJar_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Spark.yml?query=event%3Aschedule) | -| [ PostCommit Python ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python.yml) | ['3.8','3.9','3.10','3.11'] |`Run Python PostCommit (matrix_element)`| [![.github/workflows/beam_PostCommit_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python.yml?query=event%3Aschedule) | -| [ PostCommit Python Arm](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Arm.yml) | ['3.8','3.9','3.10','3.11'] |`Run Python PostCommit Arm (matrix_element)`| [![.github/workflows/beam_PostCommit_Python_Arm.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Arm.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Arm.yml?query=event%3Aschedule) | -| [ PostCommit Python Examples Dataflow 
](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Dataflow.yml) | N/A |`Run Python Examples_Dataflow`| [![.github/workflows/beam_PostCommit_Python_Examples_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit Python Examples Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Direct.yml) | ['3.8','3.9','3.10','3.11'] |`Run Python Examples_Direct (matrix_element)`| [![.github/workflows/beam_PostCommit_Python_Examples_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Direct.yml?query=event%3Aschedule) | -| [ PostCommit Python Examples Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Flink.yml) | ['3.8','3.11'] |`Run Python Examples_Flink (matrix_element)`| [![.github/workflows/beam_PostCommit_Python_Examples_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Flink.yml?query=event%3Aschedule) | -| [ PostCommit Python Examples Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Spark.yml) | ['3.8','3.11'] |`Run Python Examples_Spark (matrix_element)`| [![.github/workflows/beam_PostCommit_Python_Examples_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Spark.yml?query=event%3Aschedule) | -| [ PostCommit Python MongoDBIO IT ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml) | N/A |`Run Python MongoDBIO_IT`| [![.github/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml?query=event%3Aschedule) | -| [ PostCommit Python Nexmark Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Nexmark_Direct.yml) | N/A |`Run Python Direct Runner Nexmark Tests`| [![.github/workflows/beam_PostCommit_Python_Nexmark_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Nexmark_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Nexmark_Direct.yml?query=event%3Aschedule) | -| [ PostCommit Python ValidatesContainer Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml) | ['3.8','3.9','3.10','3.11'] |`Run Python Dataflow ValidatesContainer (matrix_element)`| [![.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit Python ValidatesContainer Dataflow With RC 
](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml) | ['3.8','3.9','3.10','3.11'] |`Run Python RC Dataflow ValidatesContainer (matrix_element)`| [![.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml?query=event%3Aschedule) | -| [ PostCommit Python ValidatesRunner Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml) | ['3.8','3.11'] |`Run Python Dataflow ValidatesRunner (matrix_element)`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit Python ValidatesRunner Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml) | ['3.8','3.11'] |`Run Python Flink ValidatesRunner (matrix_element)`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml?query=event%3Aschedule) | -| [ PostCommit Python ValidatesRunner Samza ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml) | ['3.8','3.11'] |`Run Python Samza ValidatesRunner (matrix_element)`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml?query=event%3Aschedule) | -| [ PostCommit Python ValidatesRunner Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml) | ['3.8','3.9','3.11'] |`Run Python Spark ValidatesRunner (matrix_element)`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml?query=event%3Aschedule) | -| [ PostCommit Python Xlang Gcp Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml) | N/A |`Run Python_Xlang_Gcp_Dataflow PostCommit`| [![.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit Python Xlang Gcp Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml) | N/A |`Run Python_Xlang_Gcp_Direct PostCommit`| 
[![.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml?query=event%3Aschedule) | -| [ PostCommit Python Xlang IO Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml) | N/A |`Run Python_Xlang_IO_Dataflow PostCommit`| [![.github/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit Sickbay Python ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Sickbay_Python.yml) | ['3.8','3.9','3.10','3.11'] |`Run Python (matrix_element) PostCommit Sickbay`| [![.github/workflows/beam_PostCommit_Sickbay_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Sickbay_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Sickbay_Python.yml?query=event%3Aschedule) | -| [ PostCommit SQL ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_SQL.yml) | N/A |`Run SQL PostCommit`| [![.github/workflows/beam_PostCommit_SQL.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_SQL.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_SQL.yml?query=event%3Aschedule) | -| [ PostCommit TransformService Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_TransformService_Direct.yml) | N/A |`Run TransformService_Direct PostCommit`| [![.github/workflows/beam_PostCommit_TransformService_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_TransformService_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_TransformService_Direct.yml?query=event%3Aschedule) -| [ PostCommit Website Publish ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Website_Publish.yml) | N/A | N/A | [![.github/workflows/beam_PostCommit_Website_Publish.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Website_Publish.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Website_Publish.yml?query=event%3Aschedule) | -| [ PostCommit Website Test](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Website_Test.yml) | N/A |`Run Full Website Test`| [![.github/workflows/beam_PostCommit_Website_Test.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Website_Test.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Website_Test.yml?query=event%3Aschedule) | -| [ PostCommit XVR GoUsingJava Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml) | N/A |`Run XVR_GoUsingJava_Dataflow PostCommit`| [![.github/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit XVR Direct 
](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Direct.yml) | N/A |`Run XVR_Direct PostCommit`| [![.github/workflows/beam_PostCommit_XVR_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Direct.yml?query=event%3Aschedule) |
-| [ PostCommit XVR Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Flink.yml) | N/A |`Run XVR_Flink PostCommit`| [![.github/workflows/beam_PostCommit_XVR_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Flink.yml?query=event%3Aschedule) |
-| [ PostCommit XVR JavaUsingPython Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml) | N/A |`Run XVR_JavaUsingPython_Dataflow PostCommit`| [![.github/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml?query=event%3Aschedule) |
-| [ PostCommit XVR PythonUsingJava Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml) | N/A |`Run XVR_PythonUsingJava_Dataflow PostCommit`| [![.github/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml?query=event%3Aschedule) |
-| [ PostCommit XVR PythonUsingJavaSQL Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml) | N/A |`Run XVR_PythonUsingJavaSQL_Dataflow PostCommit`| [![.github/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml?query=event%3Aschedule) |
-| [ PostCommit XVR Samza ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Samza.yml) | N/A |`Run XVR_Samza PostCommit`| [![.github/workflows/beam_PostCommit_XVR_Samza.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Samza.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Samza.yml?query=event%3Aschedule) |
-| [ PostCommit XVR Spark3 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Spark3.yml) | N/A |`Run XVR_Spark3 PostCommit`| [![.github/workflows/beam_PostCommit_XVR_Spark3.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Spark3.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Spark3.yml?query=event%3Aschedule) |
-| [ Python Validates Container Dataflow ARM ](https://github.com/apache/beam/actions/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml) | ['3.8','3.9','3.10','3.11'] |`Run Python ValidatesContainer Dataflow ARM (matrix_element)`|[![.github/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml](https://github.com/apache/beam/actions/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml?query=event%3Aschedule) |
+| [ PostCommit Go ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go.yml) | N/A |`beam_PostCommit_Go.json`| [![.github/workflows/beam_PostCommit_Go.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go.yml?query=event%3Aschedule) |
+| [ PostCommit Go Dataflow ARM](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_Dataflow_ARM.yml) | N/A |`beam_PostCommit_Go_Dataflow_ARM.json`| [![.github/workflows/beam_PostCommit_Go_Dataflow_ARM.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_Dataflow_ARM.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_Dataflow_ARM.yml?query=event%3Aschedule) |
+| [ PostCommit Go VR Flink](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Flink.yml) | N/A |`beam_PostCommit_Go_VR_Flink.json`| [![.github/workflows/beam_PostCommit_Go_VR_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Flink.yml?query=event%3Aschedule) |
+| [ PostCommit Go VR Samza](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Samza.yml) | N/A |`beam_PostCommit_Go_VR_Samza.json`| [![.github/workflows/beam_PostCommit_Go_VR_Samza.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Samza.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Samza.yml?query=event%3Aschedule) |
+| [ PostCommit Go VR Spark](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Spark.yml) | N/A |`beam_PostCommit_Go_VR_Spark.json`| [![.github/workflows/beam_PostCommit_Go_VR_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Go_VR_Spark.yml?query=event%3Aschedule) |
+| [ PostCommit Java Avro Versions ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Avro_Versions.yml) | N/A |`beam_PostCommit_Java_Avro_Versions.json`| [![.github/workflows/beam_PostCommit_Java_Avro_Versions.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Avro_Versions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Avro_Versions.yml?query=event%3Aschedule) |
+| [ PostCommit Java Dataflow V1 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_DataflowV1.yml) | N/A |`beam_PostCommit_Java_DataflowV1.json`| [![.github/workflows/beam_PostCommit_Java_DataflowV1.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_DataflowV1.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_DataflowV1.yml?query=event%3Aschedule) |
+| [ PostCommit Java Dataflow V2 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_DataflowV2.yml) | N/A |`beam_PostCommit_Java_DataflowV2.json`| [![.github/workflows/beam_PostCommit_Java_DataflowV2.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_DataflowV2.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_DataflowV2.yml?query=event%3Aschedule) |
+| [ PostCommit Java Examples Dataflow ARM ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_ARM.yml) | ['8','11','17','21'] |`beam_PostCommit_Java_Examples_Dataflow_ARM.json`| [![.github/workflows/beam_PostCommit_Java_Examples_Dataflow_ARM.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_ARM.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_ARM.yml?query=event%3Aschedule) |
+| [ PostCommit Java Examples Dataflow](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow.yml) | N/A |`beam_PostCommit_Java_Examples_Dataflow.json`| [![.github/workflows/beam_PostCommit_Java_Examples_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow.yml?query=event%3Aschedule) |
+| [ PostCommit Java Examples Dataflow Java ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_Java.yml) | ['11','17','21'] |`beam_PostCommit_Java_Examples_Dataflow_Java.json`| [![.github/workflows/beam_PostCommit_Java_Examples_Dataflow_Java.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_Java.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_Java.yml?query=event%3Aschedule) |
+| [ PostCommit Java Examples Dataflow V2 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_V2.yml) | N/A |`beam_PostCommit_Java_Examples_Dataflow_V2.json`| [![.github/workflows/beam_PostCommit_Java_Examples_Dataflow_V2.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_V2.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_V2.yml?query=event%3Aschedule) |
+| [ PostCommit Java Examples Dataflow V2 Java ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_V2_Java.yml) | ['11','17','21'] |`beam_PostCommit_Java_Examples_Dataflow_V2_Java.json`| [![.github/workflows/beam_PostCommit_Java_Examples_Dataflow_V2_Java.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_V2_Java.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Dataflow_V2_Java.yml?query=event%3Aschedule) |
+| [ PostCommit Java Examples Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Direct.yml) | N/A |`beam_PostCommit_Java_Examples_Direct.json`| [![.github/workflows/beam_PostCommit_Java_Examples_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Direct.yml?query=event%3Aschedule) |
+| [ PostCommit Java Examples Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Flink.yml) | N/A |`beam_PostCommit_Java_Examples_Flink.json`| [![.github/workflows/beam_PostCommit_Java_Examples_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Flink.yml?query=event%3Aschedule) |
+| [ PostCommit Java Examples Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Spark.yml) | N/A |`beam_PostCommit_Java_Examples_Spark.json`| [![.github/workflows/beam_PostCommit_Java_Examples_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Examples_Spark.yml?query=event%3Aschedule) |
+| [ PostCommit Java Hadoop Versions ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Hadoop_Versions.yml) | N/A |`beam_PostCommit_Java_Hadoop_Versions.json`| [![.github/workflows/beam_PostCommit_Java_Hadoop_Versions.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Hadoop_Versions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Hadoop_Versions.yml?query=event%3Aschedule) |
+| [ PostCommit Java IO Performance Tests ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_IO_Performance_Tests.yml) | N/A |`beam_PostCommit_Java_IO_Performance_Tests.json`| [![.github/workflows/beam_PostCommit_Java_IO_Performance_Tests.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_IO_Performance_Tests.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_IO_Performance_Tests.yml?query=event%3Aschedule) |
+| [ PostCommit Java InfluxDbIO Integration Test ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_InfluxDbIO_IT.yml) | N/A |`beam_PostCommit_Java_InfluxDbIO_IT.json`| [![.github/workflows/beam_PostCommit_Java_InfluxDbIO_IT.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_InfluxDbIO_IT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_InfluxDbIO_IT.yml?query=event%3Aschedule) |
+| [ PostCommit Java Jpms Dataflow Java11 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java11.yml) | N/A |`beam_PostCommit_Java_Jpms_Dataflow_Java11.json`| [![.github/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java11.yml?query=event%3Aschedule) |
+| [ PostCommit Java Jpms Dataflow Java17 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java17.yml) | N/A |`beam_PostCommit_Java_Jpms_Dataflow_Java17.json`| [![.github/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java17.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java17.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java17.yml?query=event%3Aschedule) |
+| [ PostCommit Java Jpms Direct Java11 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java11.yml) | N/A |`beam_PostCommit_Java_Jpms_Direct_Java11.json`| [![.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java11.yml?query=event%3Aschedule) |
+| [ PostCommit Java Jpms Direct Java17 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java17.yml) | N/A |`beam_PostCommit_Java_Jpms_Direct_Java17.json`| [![.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java17.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java17.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java17.yml?query=event%3Aschedule) |
+| [ PostCommit Java Jpms Direct Java21 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java21.yml) | N/A |`beam_PostCommit_Java_Jpms_Direct_Java21.json`| [![.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java21.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java21.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Direct_Java21.yml?query=event%3Aschedule) |
+| [ PostCommit Java Jpms Flink Java11 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Flink_Java11.yml) | N/A |`beam_PostCommit_Java_Jpms_Flink_Java11.json`| [![.github/workflows/beam_PostCommit_Java_Jpms_Flink_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Flink_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Flink_Java11.yml?query=event%3Aschedule) |
+| [ PostCommit Java Jpms Spark Java11 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Spark_Java11.yml) | N/A |`beam_PostCommit_Java_Jpms_Spark_Java11.json`| [![.github/workflows/beam_PostCommit_Java_Jpms_Spark_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Spark_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Jpms_Spark_Java11.yml?query=event%3Aschedule) |
+| [ PostCommit Java Nexmark Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow.yml) | N/A |`beam_PostCommit_Java_Nexmark_Dataflow.json`| [![.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow.yml?query=event%3Aschedule) |
+| [ PostCommit Java Nexmark Dataflow V2 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2.yml) | N/A |`beam_PostCommit_Java_Nexmark_Dataflow_V2.json`| [![.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2.yml?query=event%3Aschedule) |
+| [ PostCommit Java Nexmark Dataflow V2 Java ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.yml) | ['11','17'] |`beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.json`| [![.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.yml?query=event%3Aschedule) |
+| [ PostCommit Java Nexmark Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Direct.yml) | N/A |`beam_PostCommit_Java_Nexmark_Direct.json`| [![.github/workflows/beam_PostCommit_Java_Nexmark_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Direct.yml?query=event%3Aschedule) |
+| [ PostCommit Java Nexmark Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Flink.yml) | N/A |`beam_PostCommit_Java_Nexmark_Flink.json`| [![.github/workflows/beam_PostCommit_Java_Nexmark_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Flink.yml?query=event%3Aschedule) |
+| [ PostCommit Java Nexmark Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Spark.yml) | N/A |`beam_PostCommit_Java_Nexmark_Spark.json`| [![.github/workflows/beam_PostCommit_Java_Nexmark_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Nexmark_Spark.yml?query=event%3Aschedule) |
+| [ PostCommit Java PVR Flink Streaming ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Flink_Streaming.yml) | N/A |`beam_PostCommit_Java_PVR_Flink_Streaming.json`| [![.github/workflows/beam_PostCommit_Java_PVR_Flink_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Flink_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Flink_Streaming.yml?query=event%3Aschedule) |
+| [ PostCommit Java PVR Samza ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Samza.yml) | N/A |`beam_PostCommit_Java_PVR_Samza.json`| [![.github/workflows/beam_PostCommit_Java_PVR_Samza.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Samza.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Samza.yml?query=event%3Aschedule) |
+| [ PostCommit Java SingleStoreIO IT ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_SingleStoreIO_IT.yml) | N/A |`beam_PostCommit_Java_SingleStoreIO_IT.json`| [![.github/workflows/beam_PostCommit_Java_SingleStoreIO_IT.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_SingleStoreIO_IT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_SingleStoreIO_IT.yml?query=event%3Aschedule) |
+| [ PostCommit Java PVR Spark3 Streaming ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Spark3_Streaming.yml) | N/A |`beam_PostCommit_Java_PVR_Spark3_Streaming.json`| [![.github/workflows/beam_PostCommit_Java_PVR_Spark3_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Spark3_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Spark3_Streaming.yml?query=event%3Aschedule) |
+| [ PostCommit Java PVR Spark Batch ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Spark_Batch.yml) | N/A |`beam_PostCommit_Java_PVR_Spark_Batch.json`| [![.github/workflows/beam_PostCommit_Java_PVR_Spark_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Spark_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_PVR_Spark_Batch.yml?query=event%3Aschedule) |
+| [ PostCommit Java Sickbay ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Sickbay.yml) | N/A |`beam_PostCommit_Java_Sickbay.json`| [![.github/workflows/beam_PostCommit_Java_Sickbay.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Sickbay.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Sickbay.yml?query=event%3Aschedule) |
+| [ PostCommit Java Tpcds Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Dataflow.yml) | N/A |`beam_PostCommit_Java_Tpcds_Dataflow.json`| [![.github/workflows/beam_PostCommit_Java_Tpcds_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Dataflow.yml?query=event%3Aschedule) |
+| [ PostCommit Java Tpcds Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Flink.yml) | N/A |`beam_PostCommit_Java_Tpcds_Flink.json`| [![.github/workflows/beam_PostCommit_Java_Tpcds_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Flink.yml?query=event%3Aschedule) |
+| [ PostCommit Java Tpcds Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Spark.yml) | N/A |`beam_PostCommit_Java_Tpcds_Spark.json`| [![.github/workflows/beam_PostCommit_Java_Tpcds_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_Tpcds_Spark.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner Dataflow JavaVersions ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.yml) | ['11','17'] |`beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.yml) | N/A |`beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner Dataflow V2 Streaming ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.yml) | N/A |`beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner Dataflow V2 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.yml) | N/A |`beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow.yml) | N/A |`beam_PostCommit_Java_ValidatesRunner_Dataflow.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner Direct JavaVersions ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.yml) | ['11','17'] |`beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Direct.yml) | N/A |`beam_PostCommit_Java_ValidatesRunner_Direct.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Direct.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner Flink Java11 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Flink_Java11.yml) | N/A |`beam_PostCommit_Java_ValidatesRunner_Flink_Java11.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Flink_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Flink_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Flink_Java11.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Flink.yml) | N/A |`beam_PostCommit_Java_ValidatesRunner_Flink.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Flink.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner Samza ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Samza.yml) | N/A |`beam_PostCommit_Java_ValidatesRunner_Samza.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Samza.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Samza.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Samza.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner Spark Java11 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Spark_Java11.yml) | N/A |`beam_PostCommit_Java_ValidatesRunner_Spark_Java11.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Spark_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Spark_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Spark_Java11.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Spark.yml) | N/A |`beam_PostCommit_Java_ValidatesRunner_Spark.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Spark.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner SparkStructuredStreaming ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.yml) | N/A |`beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner Twister2 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Twister2.yml) | N/A |`beam_PostCommit_Java_ValidatesRunner_Twister2.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_Twister2.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Twister2.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_Twister2.yml?query=event%3Aschedule) |
+| [ PostCommit Java ValidatesRunner ULR ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_ULR.yml) | N/A |`beam_PostCommit_Java_ValidatesRunner_ULR.json`| [![.github/workflows/beam_PostCommit_Java_ValidatesRunner_ULR.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_ULR.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java_ValidatesRunner_ULR.yml?query=event%3Aschedule) |
+| [ PostCommit Java ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java.yml) | N/A |`beam_PostCommit_Java.json`| [![.github/workflows/beam_PostCommit_Java.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Java.yml?query=event%3Aschedule) |
+| [ PostCommit Javadoc ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Javadoc.yml) | N/A |`beam_PostCommit_Javadoc.json`| [![.github/workflows/beam_PostCommit_Javadoc.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Javadoc.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Javadoc.yml?query=event%3Aschedule) |
+| [ PostCommit PortableJar Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Flink.yml) | N/A |`beam_PostCommit_PortableJar_Flink.json`| [![.github/workflows/beam_PostCommit_PortableJar_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Flink.yml?query=event%3Aschedule) |
+| [ PostCommit PortableJar Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Spark.yml) | N/A |`beam_PostCommit_PortableJar_Spark.json`| [![.github/workflows/beam_PostCommit_PortableJar_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Spark.yml?query=event%3Aschedule) |
+| [ PostCommit Python ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python.yml) | ['3.8','3.9','3.10','3.11'] |`beam_PostCommit_Python.json`| [![.github/workflows/beam_PostCommit_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python.yml?query=event%3Aschedule) |
+| [ PostCommit Python Arm](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Arm.yml) | ['3.8','3.9','3.10','3.11'] |`beam_PostCommit_Python_Arm.json`| [![.github/workflows/beam_PostCommit_Python_Arm.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Arm.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Arm.yml?query=event%3Aschedule) |
+| [ PostCommit Python Examples Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Dataflow.yml) | N/A |`beam_PostCommit_Python_Examples_Dataflow.json`| [![.github/workflows/beam_PostCommit_Python_Examples_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Dataflow.yml?query=event%3Aschedule) |
+| [ PostCommit Python Examples Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Direct.yml) | ['3.8','3.9','3.10','3.11'] |`beam_PostCommit_Python_Examples_Direct.json`| [![.github/workflows/beam_PostCommit_Python_Examples_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Direct.yml?query=event%3Aschedule) |
+| [ PostCommit Python Examples Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Flink.yml) | ['3.8','3.11'] |`beam_PostCommit_Python_Examples_Flink.json`| [![.github/workflows/beam_PostCommit_Python_Examples_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Flink.yml?query=event%3Aschedule) |
+| [ PostCommit Python Examples Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Spark.yml) | ['3.8','3.11'] |`beam_PostCommit_Python_Examples_Spark.json`| [![.github/workflows/beam_PostCommit_Python_Examples_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Spark.yml?query=event%3Aschedule) |
+| [ PostCommit Python MongoDBIO IT ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml) | N/A |`beam_PostCommit_Python_MongoDBIO_IT.json`| [![.github/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml?query=event%3Aschedule) |
+| [ PostCommit Python Nexmark Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Nexmark_Direct.yml) | N/A |`beam_PostCommit_Python_Nexmark_Direct.json`| [![.github/workflows/beam_PostCommit_Python_Nexmark_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Nexmark_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Nexmark_Direct.yml?query=event%3Aschedule) |
+| [ PostCommit Python ValidatesContainer Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml) | ['3.8','3.9','3.10','3.11'] |`beam_PostCommit_Python_ValidatesContainer_Dataflow.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml?query=event%3Aschedule) |
+| [ PostCommit Python ValidatesContainer Dataflow With RC ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml) | ['3.8','3.9','3.10','3.11'] |`beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml?query=event%3Aschedule) |
+| [ PostCommit Python ValidatesRunner Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml) | ['3.8','3.11'] |`beam_PostCommit_Python_ValidatesRunner_Dataflow.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml?query=event%3Aschedule) |
+| [ PostCommit Python ValidatesRunner Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml) | ['3.8','3.11'] |`beam_PostCommit_Python_ValidatesRunner_Flink.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml?query=event%3Aschedule) |
+| [ PostCommit Python ValidatesRunner Samza ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml) | ['3.8','3.11'] |`beam_PostCommit_Python_ValidatesRunner_Samza.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml?query=event%3Aschedule) |
+| [ PostCommit Python ValidatesRunner Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml) | ['3.8','3.9','3.11'] |`beam_PostCommit_Python_ValidatesRunner_Spark.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml?query=event%3Aschedule) |
+| [ PostCommit Python Xlang Gcp Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml) | N/A |`beam_PostCommit_Python_Xlang_Gcp_Dataflow.json`| [![.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml?query=event%3Aschedule) |
+| [ PostCommit Python Xlang Gcp Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml) | N/A |`beam_PostCommit_Python_Xlang_Gcp_Direct.json`| [![.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml?query=event%3Aschedule) |
+| [ PostCommit Python Xlang IO Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml) | N/A |`beam_PostCommit_Python_Xlang_IO_Dataflow.json`| [![.github/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml?query=event%3Aschedule) |
+| [ PostCommit Sickbay Python ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Sickbay_Python.yml) | ['3.8','3.9','3.10','3.11'] |`beam_PostCommit_Sickbay_Python.json`| [![.github/workflows/beam_PostCommit_Sickbay_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Sickbay_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Sickbay_Python.yml?query=event%3Aschedule) |
+| [ PostCommit SQL ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_SQL.yml) | N/A |`beam_PostCommit_SQL.json`| [![.github/workflows/beam_PostCommit_SQL.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_SQL.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_SQL.yml?query=event%3Aschedule) |
+| [ PostCommit TransformService Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_TransformService_Direct.yml) | N/A |`beam_PostCommit_TransformService_Direct.json`| [![.github/workflows/beam_PostCommit_TransformService_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_TransformService_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_TransformService_Direct.yml?query=event%3Aschedule) |
+| [ PostCommit Website Test](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Website_Test.yml) | N/A |`beam_PostCommit_Website_Test.json`| [![.github/workflows/beam_PostCommit_Website_Test.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Website_Test.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Website_Test.yml?query=event%3Aschedule) |
+| [ PostCommit XVR GoUsingJava Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml) | N/A |`beam_PostCommit_XVR_GoUsingJava_Dataflow.json`| [![.github/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml?query=event%3Aschedule) |
+| [ PostCommit XVR Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Direct.yml) | N/A |`beam_PostCommit_XVR_Direct.json`| [![.github/workflows/beam_PostCommit_XVR_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Direct.yml?query=event%3Aschedule) |
+| [ PostCommit XVR Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Flink.yml) | N/A |`beam_PostCommit_XVR_Flink.json`| [![.github/workflows/beam_PostCommit_XVR_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Flink.yml?query=event%3Aschedule) |
+| [ PostCommit XVR JavaUsingPython Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml) | N/A |`beam_PostCommit_XVR_JavaUsingPython_Dataflow.json`| [![.github/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml?query=event%3Aschedule) |
+| [ PostCommit XVR PythonUsingJava Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml) | N/A |`beam_PostCommit_XVR_PythonUsingJava_Dataflow.json`| [![.github/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml?query=event%3Aschedule) |
+| [ PostCommit XVR PythonUsingJavaSQL Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml) | N/A |`beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.json`| [![.github/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml?query=event%3Aschedule) |
+| [ PostCommit XVR Samza ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Samza.yml) | N/A |`beam_PostCommit_XVR_Samza.json`| [![.github/workflows/beam_PostCommit_XVR_Samza.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Samza.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Samza.yml?query=event%3Aschedule) |
+| [ PostCommit XVR Spark3 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Spark3.yml) | N/A |`beam_PostCommit_XVR_Spark3.json`| [![.github/workflows/beam_PostCommit_XVR_Spark3.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Spark3.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Spark3.yml?query=event%3Aschedule) |
+| [ Python Validates Container Dataflow ARM ](https://github.com/apache/beam/actions/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml) | ['3.8','3.9','3.10','3.11'] |`beam_Python_ValidatesContainer_Dataflow_ARM.json`|[![.github/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml](https://github.com/apache/beam/actions/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml?query=event%3Aschedule) |
### PerformanceTests and Benchmark Jobs
-| Workflow name | Matrix | Trigger Phrase | Cron Status |
-|:-------------:|:------:|:--------------:|:-----------:|
-| [ CloudML Benchmarks Dataflow ](https://github.com/apache/beam/actions/workflows/beam_CloudML_Benchmarks_Dataflow.yml) | N/A |`Run TFT Criteo Benchmarks`| [![.github/workflows/beam_CloudML_Benchmarks_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_CloudML_Benchmarks_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_CloudML_Benchmarks_Dataflow.yml?query=event%3Aschedule)
-| [ Inference Python Benchmarks Dataflow ](https://github.com/apache/beam/actions/workflows/beam_Inference_Python_Benchmarks_Dataflow.yml) | N/A |`Run Inference Benchmarks`| [![.github/workflows/beam_Inference_Python_Benchmarks_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_Inference_Python_Benchmarks_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Inference_Python_Benchmarks_Dataflow.yml?query=event%3Aschedule)
-| [ Java JMH ](https://github.com/apache/beam/actions/workflows/beam_Java_JMH.yml) | N/A | N/A | [![.github/workflows/beam_Java_JMH.yml](https://github.com/apache/beam/actions/workflows/beam_Java_JMH.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Java_JMH.yml?query=event%3Aschedule)
-| [ Performance Tests AvroIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_AvroIOIT_HDFS.yml) | N/A |`Run Java AvroIO Performance Test HDFS`| [![.github/workflows/beam_PerformanceTests_AvroIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_AvroIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_AvroIOIT_HDFS.yml?query=event%3Aschedule)
-| [ Performance Tests AvroIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_AvroIOIT.yml) | N/A |`Run Java AvroIO Performance Test`| [![.github/workflows/beam_PerformanceTests_AvroIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_AvroIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_AvroIOIT.yml?query=event%3Aschedule)
-| [ Performance Tests BigQueryIO Batch Java Avro ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Avro.yml) | N/A |`Run BigQueryIO Batch Performance Test Java Avro`| [![.github/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Avro.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Avro.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Avro.yml?query=event%3Aschedule)
-| [ Performance Tests BigQueryIO Batch Java Json ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Json.yml) | N/A |`Run BigQueryIO Batch Performance Test Java Json`| [![.github/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Json.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Json.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Json.yml?query=event%3Aschedule)
-| [ Performance Tests BigQueryIO Streaming Java ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Streaming_Java.yml) | N/A |`Run BigQueryIO Streaming Performance Test Java`| [![.github/workflows/beam_PerformanceTests_BigQueryIO_Streaming_Java.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Streaming_Java.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Streaming_Java.yml?query=event%3Aschedule)
-| [ Performance Tests BigQueryIO Read Python ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BiqQueryIO_Read_Python.yml) | N/A |`Run BigQueryIO Read Performance Test Python`| [![.github/workflows/beam_PerformanceTests_BiqQueryIO_Read_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BiqQueryIO_Read_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BiqQueryIO_Read_Python.yml?query=event%3Aschedule)
-| [ Performance Tests BigQueryIO Write Python Batch ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch.yml) | N/A |`Run BigQueryIO Write Performance Test Python`| [![.github/workflows/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch.yml?query=event%3Aschedule)
-| [ PerformanceTests Cdap ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Cdap.yml) | N/A |`Run Java CdapIO Performance Test`| [![.github/workflows/beam_PerformanceTests_Cdap.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Cdap.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Cdap.yml?query=event%3Aschedule)
-| [ PerformanceTests Compressed TextIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Compressed_TextIOIT_HDFS.yml) | N/A |`Run Java CompressedTextIO Performance Test HDFS`| [![.github/workflows/beam_PerformanceTests_Compressed_TextIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Compressed_TextIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Compressed_TextIOIT_HDFS.yml?query=event%3Aschedule)
-| [ PerformanceTests Compressed TextIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Compressed_TextIOIT.yml) | N/A |`Run Java CompressedTextIO Performance Test`| [![.github/workflows/beam_PerformanceTests_Compressed_TextIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Compressed_TextIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Compressed_TextIOIT.yml?query=event%3Aschedule)
-| [ PerformanceTests HadoopFormat ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_HadoopFormat.yml) | N/A |`Run Java HadoopFormatIO Performance Test`| [![.github/workflows/beam_PerformanceTests_HadoopFormat.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_HadoopFormat.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_HadoopFormat.yml?query=event%3Aschedule)
-| [ PerformanceTests JDBC ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_JDBC.yml) | N/A |`Run Java JdbcIO Performance Test`| [![.github/workflows/beam_PerformanceTests_JDBC.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_JDBC.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_JDBC.yml?query=event%3Aschedule)
-| [ PerformanceTests Kafka IO ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Kafka_IO.yml) | N/A |`Run Java KafkaIO Performance Test`| [![.github/workflows/beam_PerformanceTests_Kafka_IO.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Kafka_IO.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Kafka_IO.yml?query=event%3Aschedule)
-| [ PerformanceTests ManyFiles TextIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ManyFiles_TextIOIT_HDFS.yml) | N/A |`Run Java ManyFilesTextIO Performance Test HDFS`| [![.github/workflows/beam_PerformanceTests_ManyFiles_TextIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ManyFiles_TextIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ManyFiles_TextIOIT_HDFS.yml?query=event%3Aschedule)
-| [ PerformanceTests ManyFiles TextIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ManyFiles_TextIOIT.yml) | N/A |`Run Java ManyFilesTextIO Performance Test`| [![.github/workflows/beam_PerformanceTests_ManyFiles_TextIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ManyFiles_TextIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ManyFiles_TextIOIT.yml?query=event%3Aschedule)
-| [ PerformanceTests MongoDBIO IT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_MongoDBIO_IT.yml) | N/A |`Run Java MongoDBIO Performance Test`| [![.github/workflows/beam_PerformanceTests_MongoDBIO_IT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_MongoDBIO_IT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_MongoDBIO_IT.yml?query=event%3Aschedule)
-| [ PerformanceTests ParquetIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ParquetIOIT_HDFS.yml) | N/A |`Run Java ParquetIO Performance Test HDFS`| [![.github/workflows/beam_PerformanceTests_ParquetIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ParquetIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ParquetIOIT_HDFS.yml?query=event%3Aschedule)
-| [ PerformanceTests ParquetIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ParquetIOIT.yml) | N/A |`Run Java ParquetIO Performance Test`| [![.github/workflows/beam_PerformanceTests_ParquetIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ParquetIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ParquetIOIT.yml?query=event%3Aschedule)
-| [ PerformanceTests PubsubIOIT Python Streaming ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_PubsubIOIT_Python_Streaming.yml) | N/A |`Run PubsubIO Performance Test Python`| [![.github/workflows/beam_PerformanceTests_PubsubIOIT_Python_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_PubsubIOIT_Python_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_PubsubIOIT_Python_Streaming.yml?query=event%3Aschedule)
-| [ PerformanceTests SingleStoreIO ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SingleStoreIO.yml) | N/A |`Run Java SingleStoreIO Performance Test`| [![.github/workflows/beam_PerformanceTests_SingleStoreIO.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SingleStoreIO.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SingleStoreIO.yml?query=event%3Aschedule)
-| [ PerformanceTests SpannerIO Read 2GB Python ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SpannerIO_Read_2GB_Python.yml) | N/A |`Run SpannerIO Read 2GB Performance Test Python`| [![.github/workflows/beam_PerformanceTests_SpannerIO_Read_2GB_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SpannerIO_Read_2GB_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SpannerIO_Read_2GB_Python.yml?query=event%3Aschedule)
-| [ PerformanceTests SpannerIO Write 2GB Python Batch ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch.yml) | N/A |`Run SpannerIO Write 2GB Performance Test Python Batch`| [![.github/workflows/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch.yml?query=event%3Aschedule)
-| [ PerformanceTests SparkReceiver IO ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SparkReceiver_IO.yml) | N/A |`Run Java SparkReceiverIO Performance Test`| [![.github/workflows/beam_PerformanceTests_SparkReceiver_IO.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SparkReceiver_IO.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SparkReceiver_IO.yml?query=event%3Aschedule)
-| [ PerformanceTests SQLBigQueryIO Batch Java ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SQLBigQueryIO_Batch_Java.yml) | N/A |`Run SQLBigQueryIO Batch Performance Test Java`| [![.github/workflows/beam_PerformanceTests_SQLBigQueryIO_Batch_Java.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SQLBigQueryIO_Batch_Java.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SQLBigQueryIO_Batch_Java.yml?query=event%3Aschedule)
-| [ PerformanceTests TextIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT_HDFS.yml) | N/A |`Run Java TextIO Performance Test HDFS`| [![.github/workflows/beam_PerformanceTests_TextIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT_HDFS.yml?query=event%3Aschedule)
-| [ PerformanceTests TextIOIT Python ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT_Python.yml) | N/A |`Run Python TextIO Performance Test`| [![.github/workflows/beam_PerformanceTests_TextIOIT_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT_Python.yml?query=event%3Aschedule)
-| [ PerformanceTests TextIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT.yml) | N/A |`Run Java TextIO Performance Test`| [![.github/workflows/beam_PerformanceTests_TextIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT.yml?query=event%3Aschedule)
-| [ PerformanceTests TFRecordIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT_HDFS.yml) | N/A |`Run Java TFRecordIO Performance Test HDFS`| [![.github/workflows/beam_PerformanceTests_TFRecordIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT_HDFS.yml?query=event%3Aschedule)
-| [ PerformanceTests TFRecordIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT.yml) | N/A |`Run Java TFRecordIO Performance Test`| [![.github/workflows/beam_PerformanceTests_TFRecordIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT.yml?query=event%3Aschedule)
-| [ PerformanceTests WordCountIT PythonVersions ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml) | ['3.8'] |`Run Python (matrix_element) WordCountIT Performance Test`| [![.github/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml?query=event%3Aschedule)
-| [ PerformanceTests XmlIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT_HDFS.yml) | N/A |`Run Java XmlIO Performance Test HDFS`| [![.github/workflows/beam_PerformanceTests_XmlIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT_HDFS.yml?query=event%3Aschedule)
-| [ PerformanceTests XmlIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT.yml) | N/A |`Run Java XmlIO Performance Test`| [![.github/workflows/beam_PerformanceTests_XmlIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT.yml?query=event%3Aschedule)
-| [ PerformanceTests xlang KafkaIO Python ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml) | N/A |`Run Python xlang KafkaIO Performance Test`| [![.github/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml?query=event%3Aschedule)
+| Workflow name | Matrix | Cron Status |
+|:-------------:|:------:|:-----------:|
+| [ CloudML Benchmarks Dataflow ](https://github.com/apache/beam/actions/workflows/beam_CloudML_Benchmarks_Dataflow.yml) | N/A | [![.github/workflows/beam_CloudML_Benchmarks_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_CloudML_Benchmarks_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_CloudML_Benchmarks_Dataflow.yml?query=event%3Aschedule)
+| [ Inference Python Benchmarks Dataflow ](https://github.com/apache/beam/actions/workflows/beam_Inference_Python_Benchmarks_Dataflow.yml) | N/A | [![.github/workflows/beam_Inference_Python_Benchmarks_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_Inference_Python_Benchmarks_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Inference_Python_Benchmarks_Dataflow.yml?query=event%3Aschedule)
+| [ Java JMH ](https://github.com/apache/beam/actions/workflows/beam_Java_JMH.yml) | N/A | [![.github/workflows/beam_Java_JMH.yml](https://github.com/apache/beam/actions/workflows/beam_Java_JMH.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Java_JMH.yml?query=event%3Aschedule)
+| [ Performance Tests AvroIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_AvroIOIT_HDFS.yml) | N/A | [![.github/workflows/beam_PerformanceTests_AvroIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_AvroIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_AvroIOIT_HDFS.yml?query=event%3Aschedule)
+| [ Performance Tests AvroIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_AvroIOIT.yml) | N/A | [![.github/workflows/beam_PerformanceTests_AvroIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_AvroIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_AvroIOIT.yml?query=event%3Aschedule)
+| [ Performance Tests BigQueryIO Batch Java Avro ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Avro.yml) | N/A | [![.github/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Avro.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Avro.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Avro.yml?query=event%3Aschedule)
+| [ Performance Tests BigQueryIO Batch Java Json ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Json.yml) | N/A | [![.github/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Json.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Json.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Json.yml?query=event%3Aschedule)
+| [ Performance Tests BigQueryIO Streaming Java ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Streaming_Java.yml) | 
N/A | [![.github/workflows/beam_PerformanceTests_BigQueryIO_Streaming_Java.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Streaming_Java.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BigQueryIO_Streaming_Java.yml?query=event%3Aschedule) +| [ Performance Tests BigQueryIO Read Python ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BiqQueryIO_Read_Python.yml) | N/A | [![.github/workflows/beam_PerformanceTests_BiqQueryIO_Read_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BiqQueryIO_Read_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BiqQueryIO_Read_Python.yml?query=event%3Aschedule) +| [ Performance Tests BigQueryIO Write Python Batch ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch.yml) | N/A | [![.github/workflows/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch.yml?query=event%3Aschedule) +| [ PerformanceTests Cdap ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Cdap.yml) | N/A | [![.github/workflows/beam_PerformanceTests_Cdap.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Cdap.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Cdap.yml?query=event%3Aschedule) +| [ PerformanceTests Compressed TextIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Compressed_TextIOIT_HDFS.yml) | N/A | [![.github/workflows/beam_PerformanceTests_Compressed_TextIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Compressed_TextIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Compressed_TextIOIT_HDFS.yml?query=event%3Aschedule) +| [ PerformanceTests Compressed TextIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Compressed_TextIOIT.yml) | N/A | [![.github/workflows/beam_PerformanceTests_Compressed_TextIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Compressed_TextIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Compressed_TextIOIT.yml?query=event%3Aschedule) +| [ PerformanceTests HadoopFormat ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_HadoopFormat.yml) | N/A | [![.github/workflows/beam_PerformanceTests_HadoopFormat.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_HadoopFormat.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_HadoopFormat.yml?query=event%3Aschedule) +| [ PerformanceTests JDBC ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_JDBC.yml) | N/A | [![.github/workflows/beam_PerformanceTests_JDBC.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_JDBC.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_JDBC.yml?query=event%3Aschedule) +| [ PerformanceTests Kafka IO 
](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Kafka_IO.yml) | N/A | [![.github/workflows/beam_PerformanceTests_Kafka_IO.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Kafka_IO.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_Kafka_IO.yml?query=event%3Aschedule) +| [ PerformanceTests ManyFiles TextIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ManyFiles_TextIOIT_HDFS.yml) | N/A | [![.github/workflows/beam_PerformanceTests_ManyFiles_TextIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ManyFiles_TextIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ManyFiles_TextIOIT_HDFS.yml?query=event%3Aschedule) +| [ PerformanceTests ManyFiles TextIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ManyFiles_TextIOIT.yml) | N/A | [![.github/workflows/beam_PerformanceTests_ManyFiles_TextIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ManyFiles_TextIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ManyFiles_TextIOIT.yml?query=event%3Aschedule) +| [ PerformanceTests MongoDBIO IT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_MongoDBIO_IT.yml) | N/A | [![.github/workflows/beam_PerformanceTests_MongoDBIO_IT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_MongoDBIO_IT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_MongoDBIO_IT.yml?query=event%3Aschedule) +| [ PerformanceTests ParquetIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ParquetIOIT_HDFS.yml) | N/A | [![.github/workflows/beam_PerformanceTests_ParquetIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ParquetIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ParquetIOIT_HDFS.yml?query=event%3Aschedule) +| [ PerformanceTests ParquetIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ParquetIOIT.yml) | N/A | [![.github/workflows/beam_PerformanceTests_ParquetIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ParquetIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_ParquetIOIT.yml?query=event%3Aschedule) +| [ PerformanceTests PubsubIOIT Python Streaming ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_PubsubIOIT_Python_Streaming.yml) | N/A | [![.github/workflows/beam_PerformanceTests_PubsubIOIT_Python_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_PubsubIOIT_Python_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_PubsubIOIT_Python_Streaming.yml?query=event%3Aschedule) +| [ PerformanceTests SingleStoreIO ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SingleStoreIO.yml) | N/A | [![.github/workflows/beam_PerformanceTests_SingleStoreIO.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SingleStoreIO.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SingleStoreIO.yml?query=event%3Aschedule) +| [ PerformanceTests SpannerIO Read 2GB Python 
](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SpannerIO_Read_2GB_Python.yml) | N/A | [![.github/workflows/beam_PerformanceTests_SpannerIO_Read_2GB_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SpannerIO_Read_2GB_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SpannerIO_Read_2GB_Python.yml?query=event%3Aschedule) +| [ PerformanceTests SpannerIO Write 2GB Python Batch ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch.yml) | N/A | [![.github/workflows/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch.yml?query=event%3Aschedule) +| [ PerformanceTests SparkReceiver IO ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SparkReceiver_IO.yml) | N/A | [![.github/workflows/beam_PerformanceTests_SparkReceiver_IO.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SparkReceiver_IO.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SparkReceiver_IO.yml?query=event%3Aschedule) +| [ PerformanceTests SQLBigQueryIO Batch Java ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SQLBigQueryIO_Batch_Java.yml) | N/A | [![.github/workflows/beam_PerformanceTests_SQLBigQueryIO_Batch_Java.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SQLBigQueryIO_Batch_Java.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_SQLBigQueryIO_Batch_Java.yml?query=event%3Aschedule) +| [ PerformanceTests TextIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT_HDFS.yml) | N/A | [![.github/workflows/beam_PerformanceTests_TextIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT_HDFS.yml?query=event%3Aschedule) +| [ PerformanceTests TextIOIT Python ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT_Python.yml) | N/A | [![.github/workflows/beam_PerformanceTests_TextIOIT_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT_Python.yml?query=event%3Aschedule) +| [ PerformanceTests TextIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT.yml) | N/A | [![.github/workflows/beam_PerformanceTests_TextIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT.yml?query=event%3Aschedule) +| [ PerformanceTests TFRecordIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT_HDFS.yml) | N/A | 
[![.github/workflows/beam_PerformanceTests_TFRecordIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT_HDFS.yml?query=event%3Aschedule) +| [ PerformanceTests TFRecordIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT.yml) | N/A | [![.github/workflows/beam_PerformanceTests_TFRecordIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT.yml?query=event%3Aschedule) +| [ PerformanceTests WordCountIT PythonVersions ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml) | ['3.8'] | [![.github/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml?query=event%3Aschedule) +| [ PerformanceTests XmlIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT_HDFS.yml) | N/A | [![.github/workflows/beam_PerformanceTests_XmlIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT_HDFS.yml?query=event%3Aschedule) +| [ PerformanceTests XmlIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT.yml) | N/A | [![.github/workflows/beam_PerformanceTests_XmlIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT.yml?query=event%3Aschedule) +| [ PerformanceTests xlang KafkaIO Python ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml) | N/A | [![.github/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml?query=event%3Aschedule) ### LoadTests Jobs -| Workflow name | Matrix | Trigger Phrase | Cron Status | -|:-------------:|:------:|:--------------:|:-----------:| -| [ LoadTests Go CoGBK Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_CoGBK_Dataflow_Batch.yml) | N/A |`Run LoadTests Go CoGBK Dataflow Batch`| [![.github/workflows/beam_LoadTests_Go_CoGBK_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_CoGBK_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_CoGBK_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Go CoGBK Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_CoGBK_Flink_batch.yml) | N/A |`Run Load Tests Go CoGBK Flink Batch`| 
[![.github/workflows/beam_LoadTests_Go_CoGBK_Flink_batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_CoGBK_Flink_batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_CoGBK_Flink_batch.yml?query=event%3Aschedule) -| [ LoadTests Go Combine Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_Combine_Dataflow_Batch.yml) | N/A |`Run Load Tests Go Combine Dataflow Batch`| [![.github/workflows/beam_LoadTests_Go_Combine_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_Combine_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_Combine_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Go Combine Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_Combine_Flink_Batch.yml) | N/A |`Run Load Tests Go Combine Flink Batch`| [![.github/workflows/beam_LoadTests_Go_Combine_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_Combine_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_Combine_Flink_Batch.yml?query=event%3Aschedule) -| [ LoadTests Go GBK Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_GBK_Dataflow_Batch.yml) | N/A |`Run Load Tests Go GBK Dataflow Batch`| [![.github/workflows/beam_LoadTests_Go_GBK_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_GBK_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_GBK_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Go GBK Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_GBK_Flink_Batch.yml) | N/A |`Run Load Tests Go GBK Flink Batch`| [![.github/workflows/beam_LoadTests_Go_GBK_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_GBK_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_GBK_Flink_Batch.yml?query=event%3Aschedule) -| [ LoadTests Go ParDo Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_ParDo_Dataflow_Batch.yml) | N/A |`Run Load Tests Go ParDo Dataflow Batch`| [![.github/workflows/beam_LoadTests_Go_ParDo_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_ParDo_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_ParDo_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Go ParDo Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_ParDo_Flink_Batch.yml) | N/A |`Run Load Tests Go ParDo Flink Batch`| [![.github/workflows/beam_LoadTests_Go_ParDo_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_ParDo_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_ParDo_Flink_Batch.yml?query=event%3Aschedule) -| [ LoadTests Go SideInput Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_SideInput_Dataflow_Batch.yml) | N/A |`Run Load Tests Go SideInput Dataflow Batch`| 
[![.github/workflows/beam_LoadTests_Go_SideInput_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_SideInput_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_SideInput_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Go SideInput Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_SideInput_Flink_Batch.yml) | N/A |`Run Load Tests Go SideInput Flink Batch`| [![.github/workflows/beam_LoadTests_Go_SideInput_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_SideInput_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_SideInput_Flink_Batch.yml?query=event%3Aschedule) -| [ LoadTests Java CoGBK Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Batch.yml) | N/A |`Run Load Tests Java CoGBK Dataflow Batch`| [![.github/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Java CoGBK Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Streaming.yml) | N/A |`Run Load Tests Java CoGBK Dataflow Streaming`| [![.github/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Streaming.yml?query=event%3Aschedule) -| [ LoadTests Java CoGBK Dataflow V2 Batch JavaVersions ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Batch_JavaVersions.yml) | ['11','17'] |`Run Load Tests Java (matrix_element) CoGBK Dataflow V2 Batch`| [![.github/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Batch_JavaVersions.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Batch_JavaVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Batch_JavaVersions.yml?query=event%3Aschedule) -| [ LoadTests Java CoGBK Dataflow V2 Streaming JavaVersions ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Streaming_JavaVersions.yml) | ['11','17'] |`Run Load Tests Java (matrix_element) CoGBK Dataflow V2 Streaming`| [![.github/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Streaming_JavaVersions.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Streaming_JavaVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Streaming_JavaVersions.yml?query=event%3Aschedule) -| [ LoadTests Java CoGBK SparkStructuredStreaming Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_SparkStructuredStreaming_Batch.yml) | N/A |`Run Load Tests Java CoGBK SparkStructuredStreaming Batch`| 
[![.github/workflows/beam_LoadTests_Java_CoGBK_SparkStructuredStreaming_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_SparkStructuredStreaming_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_SparkStructuredStreaming_Batch.yml?query=event%3Aschedule) -| [ LoadTests Java Combine Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_Dataflow_Batch.yml) | N/A |`Run Load Tests Java Combine Dataflow Batch`| [![.github/workflows/beam_LoadTests_Java_Combine_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Java Combine Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_Dataflow_Streaming.yml) | N/A |`Run Load Tests Java Combine Dataflow Streaming`| [![.github/workflows/beam_LoadTests_Java_Combine_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_Dataflow_Streaming.yml?query=event%3Aschedule) -| [ LoadTests Java Combine SparkStructuredStreaming Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_SparkStructuredStreaming_Batch.yml) | N/A |`Run Load Tests Java Combine SparkStructuredStreaming Batch`| [![.github/workflows/beam_LoadTests_Java_Combine_SparkStructuredStreaming_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_SparkStructuredStreaming_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_SparkStructuredStreaming_Batch.yml?query=event%3Aschedule) -| [ LoadTests Java GBK Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_Batch.yml) | N/A |`Run Load Tests Java GBK Dataflow Batch`| [![.github/workflows/beam_LoadTests_Java_GBK_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Java GBK Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_Streaming.yml) | N/A |`Run Load Tests Java GBK Dataflow Streaming`| [![.github/workflows/beam_LoadTests_Java_GBK_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_Streaming.yml?query=event%3Aschedule) -| [ LoadTests Java GBK Dataflow V2 Batch Java11 ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java11.yml) | N/A |`Run Load Tests Java 11 GBK Dataflow V2 Batch`| [![.github/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java11.yml?query=event%3Aschedule) -| [ LoadTests Java GBK 
Dataflow V2 Batch Java17 ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java17.yml) | N/A |`Run Load Tests Java 17 GBK Dataflow V2 Batch`| [![.github/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java17.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java17.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java17.yml?query=event%3Aschedule) -| [ LoadTests Java GBK Dataflow V2 Streaming Java11 ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java11.yml) | N/A |`Run Load Tests Java 11 GBK Dataflow V2 Streaming`| [![.github/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java11.yml?query=event%3Aschedule) -| [ LoadTests Java GBK Dataflow V2 Streaming Java17 ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java17.yml) | N/A |`Run Load Tests Java 17 GBK Dataflow V2 Streaming`| [![.github/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java17.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java17.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java17.yml?query=event%3Aschedule) -| [ LoadTests Java GBK Smoke ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Smoke.yml) | N/A |`Run Java Load Tests GBK Smoke`| [![.github/workflows/beam_LoadTests_Java_GBK_Smoke.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Smoke.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Smoke.yml?query=event%3Aschedule) -| [ LoadTests Java GBK SparkStructuredStreaming Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_SparkStructuredStreaming_Batch.yml) | N/A |`Run Load Tests Java GBK SparkStructuredStreaming Batch`| [![.github/workflows/beam_LoadTests_Java_GBK_SparkStructuredStreaming_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_SparkStructuredStreaming_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_SparkStructuredStreaming_Batch.yml?query=event%3Aschedule) -| [ LoadTests Java ParDo Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_Batch.yml) | N/A |`Run Load Tests Java ParDo Dataflow Batch`| [![.github/workflows/beam_LoadTests_Java_ParDo_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Java ParDo Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_Streaming.yml) | N/A |`Run Load Tests Java ParDo Dataflow Streaming`| 
[![.github/workflows/beam_LoadTests_Java_ParDo_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_Streaming.yml?query=event%3Aschedule) -| [ LoadTests Java ParDo Dataflow V2 Batch JavaVersions ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Batch_JavaVersions.yml) | ['11','17'] |`Run Load Tests Java (matrix_element) ParDo Dataflow V2 Batch`| [![.github/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Batch_JavaVersions.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Batch_JavaVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Batch_JavaVersions.yml?query=event%3Aschedule) -| [ LoadTests Java ParDo Dataflow V2 Streaming JavaVersions ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Streaming_JavaVersions.yml) | ['11','17'] |`Run Load Tests Java (matrix_element) ParDo Dataflow V2 Streaming`| [![.github/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Streaming_JavaVersions.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Streaming_JavaVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Streaming_JavaVersions.yml?query=event%3Aschedule) -| [ LoadTests Java ParDo SparkStructuredStreaming Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_SparkStructuredStreaming_Batch.yml) | N/A |`Run Load Tests Java ParDo SparkStructuredStreaming Batch`| [![.github/workflows/beam_LoadTests_Java_ParDo_SparkStructuredStreaming_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_SparkStructuredStreaming_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_SparkStructuredStreaming_Batch.yml?query=event%3Aschedule) -| [ LoadTests Java Combine Smoke ](https://github.com/apache/beam/actions/workflows/beam_Java_LoadTests_Combine_Smoke.yml) | N/A | N/A | [![.github/workflows/beam_Java_LoadTests_Combine_Smoke.yml](https://github.com/apache/beam/actions/workflows/beam_Java_LoadTests_Combine_Smoke.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Java_LoadTests_Combine_Smoke.yml?query=event%3Aschedule) -| [ LoadTests Python CoGBK Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Batch.yml) | N/A |`Run Load Tests Python CoGBK Dataflow Batch`| [![.github/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Python CoGBK Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Streaming.yml) | N/A |`Run Load Tests Python CoGBK Dataflow Streaming`| 
[![.github/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Streaming.yml?query=event%3Aschedule) -| [ LoadTests Python CoGBK Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Flink_Batch.yml) | N/A |`Run Load Tests Python CoGBK Flink Batch`| [![.github/workflows/beam_LoadTests_Python_CoGBK_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Flink_Batch.yml?query=event%3Aschedule) -| [ LoadTests Python Combine Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Dataflow_Batch.yml) | N/A |`Run Load Tests Python Combine Dataflow Batch`| [![.github/workflows/beam_LoadTests_Python_Combine_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Python Combine Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Dataflow_Streaming.yml) | N/A |`Run Load Tests Python Combine Dataflow Streaming`| [![.github/workflows/beam_LoadTests_Python_Combine_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Dataflow_Streaming.yml?query=event%3Aschedule) -| [ LoadTests Python Combine Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Flink_Batch.yml) | N/A |`Run Load Tests Python Combine Flink Batch`| [![.github/workflows/beam_LoadTests_Python_Combine_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Flink_Batch.yml?query=event%3Aschedule) -| [ LoadTests Python Combine Flink Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Flink_Streaming.yml) | N/A |`Run Load Tests Python Combine Flink Streaming`| [![.github/workflows/beam_LoadTests_Python_Combine_Flink_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Flink_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Flink_Streaming.yml?query=event%3Aschedule) -| [ LoadTests Python FnApiRunner Microbenchmark ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_FnApiRunner_Microbenchmark.yml) | N/A |`Run Python Load Tests FnApiRunner Microbenchmark`| [![.github/workflows/beam_LoadTests_Python_FnApiRunner_Microbenchmark](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_FnApiRunner_Microbenchmark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_FnApiRunner_Microbenchmark.yml?query=event%3Aschedule) -| [ LoadTests Python GBK Dataflow Batch 
](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Dataflow_Batch.yml) | N/A |`Run Load Tests Python GBK Dataflow Batch`| [![.github/workflows/beam_LoadTests_Python_GBK_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Python GBK Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Dataflow_Streaming.yml) | N/A |`Run Load Tests Python GBK Dataflow Streaming`| [![.github/workflows/beam_LoadTests_Python_GBK_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Dataflow_Streaming.yml?query=event%3Aschedule) -| [ LoadTests Python GBK Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Flink_Batch.yml) | N/A |`Run Load Tests Python GBK Flink Batch`| [![.github/workflows/beam_LoadTests_Python_GBK_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Flink_Batch.yml?query=event%3Aschedule) -| [ LoadTests Python GBK reiterate Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch.yml) | N/A |`Run Load Tests Python GBK reiterate Dataflow Batch`| [![.github/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Python GBK reiterate Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming.yml) | N/A |`Run Load Tests Python GBK reiterate Dataflow Streaming`| [![.github/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming.yml?query=event%3Aschedule) -| [ LoadTests Python ParDo Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Dataflow_Batch.yml) | N/A |`Run Load Tests Python ParDo Dataflow Batch`| [![.github/workflows/beam_LoadTests_Python_ParDo_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Python ParDo Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Dataflow_Streaming.yml) | N/A |`Run Python Load Tests ParDo Dataflow Streaming`| 
[![.github/workflows/beam_LoadTests_Python_ParDo_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Dataflow_Streaming.yml?query=event%3Aschedule) -| [ LoadTests Python ParDo Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Flink_Batch.yml) | N/A |`Run Load Tests Python ParDo Flink Batch`| [![.github/workflows/beam_LoadTests_Python_ParDo_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Flink_Batch.yml?query=event%3Aschedule) -| [ LoadTests Python ParDo Flink Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Flink_Streaming.yml) | N/A |`Run Load Tests Python ParDo Flink Streaming`| [![.github/workflows/beam_LoadTests_Python_ParDo_Flink_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Flink_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Flink_Streaming.yml?query=event%3Aschedule) -| [ LoadTests Python SideInput Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_SideInput_Dataflow_Batch.yml) | N/A |`Run Load Tests Python SideInput Dataflow Batch`| [![.github/workflows/beam_LoadTests_Python_SideInput_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_SideInput_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_SideInput_Dataflow_Batch.yml?query=event%3Aschedule) -| [ LoadTests Python Smoke ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Smoke.yml) | N/A |`Run Python Load Tests Smoke`| [![.github/workflows/beam_LoadTests_Python_Smoke.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Smoke.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Smoke.yml?query=event%3Aschedule) +| Workflow name | Matrix | Cron Status | +|:-------------:|:------:|:-----------:| +| [ LoadTests Go CoGBK Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_CoGBK_Dataflow_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Go_CoGBK_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_CoGBK_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_CoGBK_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Go CoGBK Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_CoGBK_Flink_batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Go_CoGBK_Flink_batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_CoGBK_Flink_batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_CoGBK_Flink_batch.yml?query=event%3Aschedule) +| [ LoadTests Go Combine Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_Combine_Dataflow_Batch.yml) | N/A | 
[![.github/workflows/beam_LoadTests_Go_Combine_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_Combine_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_Combine_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Go Combine Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_Combine_Flink_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Go_Combine_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_Combine_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_Combine_Flink_Batch.yml?query=event%3Aschedule) +| [ LoadTests Go GBK Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_GBK_Dataflow_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Go_GBK_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_GBK_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_GBK_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Go GBK Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_GBK_Flink_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Go_GBK_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_GBK_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_GBK_Flink_Batch.yml?query=event%3Aschedule) +| [ LoadTests Go ParDo Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_ParDo_Dataflow_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Go_ParDo_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_ParDo_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_ParDo_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Go ParDo Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_ParDo_Flink_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Go_ParDo_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_ParDo_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_ParDo_Flink_Batch.yml?query=event%3Aschedule) +| [ LoadTests Go SideInput Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_SideInput_Dataflow_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Go_SideInput_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_SideInput_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_SideInput_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Go SideInput Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_SideInput_Flink_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Go_SideInput_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_SideInput_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Go_SideInput_Flink_Batch.yml?query=event%3Aschedule) +| [ LoadTests Java CoGBK Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Batch.yml) | N/A | 
[![.github/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Java CoGBK Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Streaming.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Streaming.yml?query=event%3Aschedule) +| [ LoadTests Java CoGBK Dataflow V2 Batch JavaVersions ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Batch_JavaVersions.yml) | ['11','17'] | [![.github/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Batch_JavaVersions.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Batch_JavaVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Batch_JavaVersions.yml?query=event%3Aschedule) +| [ LoadTests Java CoGBK Dataflow V2 Streaming JavaVersions ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Streaming_JavaVersions.yml) | ['11','17'] | [![.github/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Streaming_JavaVersions.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Streaming_JavaVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_Dataflow_V2_Streaming_JavaVersions.yml?query=event%3Aschedule) +| [ LoadTests Java CoGBK SparkStructuredStreaming Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_SparkStructuredStreaming_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_CoGBK_SparkStructuredStreaming_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_SparkStructuredStreaming_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_CoGBK_SparkStructuredStreaming_Batch.yml?query=event%3Aschedule) +| [ LoadTests Java Combine Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_Dataflow_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_Combine_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Java Combine Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_Dataflow_Streaming.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_Combine_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_Dataflow_Streaming.yml?query=event%3Aschedule) +| [ LoadTests Java Combine SparkStructuredStreaming Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_SparkStructuredStreaming_Batch.yml) | N/A | 
[![.github/workflows/beam_LoadTests_Java_Combine_SparkStructuredStreaming_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_SparkStructuredStreaming_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_Combine_SparkStructuredStreaming_Batch.yml?query=event%3Aschedule) +| [ LoadTests Java GBK Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_GBK_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Java GBK Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_Streaming.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_GBK_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_Streaming.yml?query=event%3Aschedule) +| [ LoadTests Java GBK Dataflow V2 Batch Java11 ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java11.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java11.yml?query=event%3Aschedule) +| [ LoadTests Java GBK Dataflow V2 Batch Java17 ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java17.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java17.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java17.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Batch_Java17.yml?query=event%3Aschedule) +| [ LoadTests Java GBK Dataflow V2 Streaming Java11 ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java11.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java11.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java11.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java11.yml?query=event%3Aschedule) +| [ LoadTests Java GBK Dataflow V2 Streaming Java17 ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java17.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java17.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java17.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Dataflow_V2_Streaming_Java17.yml?query=event%3Aschedule) +| [ LoadTests Java GBK Smoke ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Smoke.yml) | N/A | 
[![.github/workflows/beam_LoadTests_Java_GBK_Smoke.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Smoke.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_Smoke.yml?query=event%3Aschedule) +| [ LoadTests Java GBK SparkStructuredStreaming Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_SparkStructuredStreaming_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_GBK_SparkStructuredStreaming_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_SparkStructuredStreaming_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_GBK_SparkStructuredStreaming_Batch.yml?query=event%3Aschedule) +| [ LoadTests Java ParDo Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_ParDo_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Java ParDo Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_Streaming.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_ParDo_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_Streaming.yml?query=event%3Aschedule) +| [ LoadTests Java ParDo Dataflow V2 Batch JavaVersions ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Batch_JavaVersions.yml) | ['11','17'] | [![.github/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Batch_JavaVersions.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Batch_JavaVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Batch_JavaVersions.yml?query=event%3Aschedule) +| [ LoadTests Java ParDo Dataflow V2 Streaming JavaVersions ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Streaming_JavaVersions.yml) | ['11','17'] | [![.github/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Streaming_JavaVersions.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Streaming_JavaVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_Dataflow_V2_Streaming_JavaVersions.yml?query=event%3Aschedule) +| [ LoadTests Java ParDo SparkStructuredStreaming Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_SparkStructuredStreaming_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Java_ParDo_SparkStructuredStreaming_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_SparkStructuredStreaming_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Java_ParDo_SparkStructuredStreaming_Batch.yml?query=event%3Aschedule) +| [ LoadTests Java Combine Smoke ](https://github.com/apache/beam/actions/workflows/beam_Java_LoadTests_Combine_Smoke.yml) | N/A | 
[![.github/workflows/beam_Java_LoadTests_Combine_Smoke.yml](https://github.com/apache/beam/actions/workflows/beam_Java_LoadTests_Combine_Smoke.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Java_LoadTests_Combine_Smoke.yml?query=event%3Aschedule) +| [ LoadTests Python CoGBK Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Python CoGBK Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Streaming.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Streaming.yml?query=event%3Aschedule) +| [ LoadTests Python CoGBK Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Flink_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_CoGBK_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_CoGBK_Flink_Batch.yml?query=event%3Aschedule) +| [ LoadTests Python Combine Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Dataflow_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_Combine_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Python Combine Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Dataflow_Streaming.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_Combine_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Dataflow_Streaming.yml?query=event%3Aschedule) +| [ LoadTests Python Combine Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Flink_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_Combine_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Flink_Batch.yml?query=event%3Aschedule) +| [ LoadTests Python Combine Flink Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Flink_Streaming.yml) | N/A | 
[![.github/workflows/beam_LoadTests_Python_Combine_Flink_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Flink_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Combine_Flink_Streaming.yml?query=event%3Aschedule) +| [ LoadTests Python FnApiRunner Microbenchmark ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_FnApiRunner_Microbenchmark.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_FnApiRunner_Microbenchmark.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_FnApiRunner_Microbenchmark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_FnApiRunner_Microbenchmark.yml?query=event%3Aschedule) +| [ LoadTests Python GBK Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Dataflow_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_GBK_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Python GBK Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Dataflow_Streaming.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_GBK_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Dataflow_Streaming.yml?query=event%3Aschedule) +| [ LoadTests Python GBK Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Flink_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_GBK_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_Flink_Batch.yml?query=event%3Aschedule) +| [ LoadTests Python GBK reiterate Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Python GBK reiterate Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming.yml?query=event%3Aschedule) +| [ LoadTests Python ParDo Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Dataflow_Batch.yml) | N/A | 
[![.github/workflows/beam_LoadTests_Python_ParDo_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Python ParDo Dataflow Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Dataflow_Streaming.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_ParDo_Dataflow_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Dataflow_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Dataflow_Streaming.yml?query=event%3Aschedule) +| [ LoadTests Python ParDo Flink Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Flink_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_ParDo_Flink_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Flink_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Flink_Batch.yml?query=event%3Aschedule) +| [ LoadTests Python ParDo Flink Streaming ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Flink_Streaming.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_ParDo_Flink_Streaming.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Flink_Streaming.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_ParDo_Flink_Streaming.yml?query=event%3Aschedule) +| [ LoadTests Python SideInput Dataflow Batch ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_SideInput_Dataflow_Batch.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_SideInput_Dataflow_Batch.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_SideInput_Dataflow_Batch.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_SideInput_Dataflow_Batch.yml?query=event%3Aschedule) +| [ LoadTests Python Smoke ](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Smoke.yml) | N/A | [![.github/workflows/beam_LoadTests_Python_Smoke.yml](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Smoke.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_LoadTests_Python_Smoke.yml?query=event%3Aschedule) ### Other Jobs -| Workflow name | Matrix | Trigger Phrase | Cron Status | -|:-------------:|:------:|:--------------:|:-----------:| -| [ Cancel Stale Dataflow Jobs ](https://github.com/apache/beam/actions/workflows/beam_CancelStaleDataflowJobs.yml) | N/A | `Run Cancel Stale Dataflow Jobs` | [![.github/workflows/beam_CancelStaleDataflowJobs.yml](https://github.com/apache/beam/actions/workflows/beam_CancelStaleDataflowJobs.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_CancelStaleDataflowJobs.yml?query=event%3Aschedule) | -| [ Clean Up GCP Resources ](https://github.com/apache/beam/actions/workflows/beam_CleanUpGCPResources.yml) | N/A | `Run Clean GCP Resources` | [![.github/workflows/beam_CleanUpGCPResources.yml](https://github.com/apache/beam/actions/workflows/beam_CleanUpGCPResources.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_CleanUpGCPResources.yml?query=event%3Aschedule) | -| [ 
Clean Up Prebuilt SDK Images ](https://github.com/apache/beam/actions/workflows/beam_CleanUpPrebuiltSDKImages.yml) | N/A | `Run Clean Prebuilt Images` | [![.github/workflows/beam_beam_CleanUpPrebuiltSDKImages.yml](https://github.com/apache/beam/actions/workflows/beam_CleanUpPrebuiltSDKImages.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_CleanUpPrebuiltSDKImages.yml?query=event%3Aschedule) | -| [ Cleanup Dataproc Resources ](https://github.com/apache/beam/actions/workflows/beam_CleanUpDataprocResources.yml) | N/A | N/A | [![.github/workflows/beam_CleanUpDataprocResources.yml](https://github.com/apache/beam/actions/workflows/beam_CleanUpDataprocResources.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_CleanUpDataprocResources.yml?query=event%3Aschedule) -| [ Community Metrics Prober ](https://github.com/apache/beam/actions/workflows/beam_Prober_CommunityMetrics.yml) | N/A |`Run Community Metrics Prober`| [![.github/workflows/beam_Prober_CommunityMetrics.yml](https://github.com/apache/beam/actions/workflows/beam_Prober_CommunityMetrics.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Prober_CommunityMetrics.yml?query=event%3Aschedule) -| [ Publish Beam SDK Snapshots ](https://github.com/apache/beam/actions/workflows/beam_Publish_Beam_SDK_Snapshots.yml) | N/A | N/A | [![.github/workflows/beam_Publish_Beam_SDK_Snapshots.yml](https://github.com/apache/beam/actions/workflows/beam_Publish_Beam_SDK_Snapshots.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Publish_Beam_SDK_Snapshots.yml?query=event%3Aschedule) | -| [ Publish Docker Snapshots ](https://github.com/apache/beam/actions/workflows/beam_Publish_Docker_Snapshots.yml) | N/A |`Publish Docker Snapshots`| [![.github/workflows/beam_Publish_Docker_Snapshots.yml](https://github.com/apache/beam/actions/workflows/beam_Publish_Docker_Snapshots.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Publish_Docker_Snapshots.yml?query=event%3Aschedule) | -| [ Rotate IO-Datastores Cluster Credentials ](https://github.com/apache/beam/actions/workflows/beam_IODatastoresCredentialsRotation.yml) | N/A | N/A | [![.github/workflows/beam_IODatastoresCredentialsRotation.yml](https://github.com/apache/beam/actions/workflows/beam_IODatastoresCredentialsRotation.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_IODatastoresCredentialsRotation.yml?query=event%3Aschedule) | -| [ Rotate Metrics Cluster Credentials ](https://github.com/apache/beam/actions/workflows/beam_MetricsCredentialsRotation.yml) | N/A | N/A | [![.github/workflows/beam_MetricsCredentialsRotation.yml](https://github.com/apache/beam/actions/workflows/beam_MetricsCredentialsRotation.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_MetricsCredentialsRotation.yml?query=event%3Aschedule) | +| Workflow name | Matrix | Cron Status | +|:-------------:|:------:|:-----------:| +| [ Publish BeamMetrics ](https://github.com/apache/beam/actions/workflows/beam_Publish_BeamMetrics.yml) | N/A | [![.github/workflows/beam_Publish_BeamMetrics.yml](https://github.com/apache/beam/actions/workflows/beam_Publish_BeamMetrics.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Publish_BeamMetrics.yml?query=event%3Aschedule) +| [ Publish Website ](https://github.com/apache/beam/actions/workflows/beam_Publish_Website.yml) | N/A | 
[![.github/workflows/beam_Publish_Website.yml](https://github.com/apache/beam/actions/workflows/beam_Publish_Website.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Publish_Website.yml?query=event%3Aschedule) | +| [ Cancel Stale Dataflow Jobs ](https://github.com/apache/beam/actions/workflows/beam_CancelStaleDataflowJobs.yml) | N/A | [![.github/workflows/beam_CancelStaleDataflowJobs.yml](https://github.com/apache/beam/actions/workflows/beam_CancelStaleDataflowJobs.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_CancelStaleDataflowJobs.yml?query=event%3Aschedule) | +| [ Clean Up GCP Resources ](https://github.com/apache/beam/actions/workflows/beam_CleanUpGCPResources.yml) | N/A | [![.github/workflows/beam_CleanUpGCPResources.yml](https://github.com/apache/beam/actions/workflows/beam_CleanUpGCPResources.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_CleanUpGCPResources.yml?query=event%3Aschedule) | +| [ Clean Up Prebuilt SDK Images ](https://github.com/apache/beam/actions/workflows/beam_CleanUpPrebuiltSDKImages.yml) | N/A | [![.github/workflows/beam_CleanUpPrebuiltSDKImages.yml](https://github.com/apache/beam/actions/workflows/beam_CleanUpPrebuiltSDKImages.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_CleanUpPrebuiltSDKImages.yml?query=event%3Aschedule) | +| [ Cleanup Dataproc Resources ](https://github.com/apache/beam/actions/workflows/beam_CleanUpDataprocResources.yml) | N/A | [![.github/workflows/beam_CleanUpDataprocResources.yml](https://github.com/apache/beam/actions/workflows/beam_CleanUpDataprocResources.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_CleanUpDataprocResources.yml?query=event%3Aschedule) +| [ Community Metrics Prober ](https://github.com/apache/beam/actions/workflows/beam_Prober_CommunityMetrics.yml) | N/A | [![.github/workflows/beam_Prober_CommunityMetrics.yml](https://github.com/apache/beam/actions/workflows/beam_Prober_CommunityMetrics.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Prober_CommunityMetrics.yml?query=event%3Aschedule) +| [ Publish Beam SDK Snapshots ](https://github.com/apache/beam/actions/workflows/beam_Publish_Beam_SDK_Snapshots.yml) | N/A | [![.github/workflows/beam_Publish_Beam_SDK_Snapshots.yml](https://github.com/apache/beam/actions/workflows/beam_Publish_Beam_SDK_Snapshots.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Publish_Beam_SDK_Snapshots.yml?query=event%3Aschedule) | +| [ Publish Docker Snapshots ](https://github.com/apache/beam/actions/workflows/beam_Publish_Docker_Snapshots.yml) | N/A | [![.github/workflows/beam_Publish_Docker_Snapshots.yml](https://github.com/apache/beam/actions/workflows/beam_Publish_Docker_Snapshots.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Publish_Docker_Snapshots.yml?query=event%3Aschedule) | +| [ Rotate IO-Datastores Cluster Credentials ](https://github.com/apache/beam/actions/workflows/beam_IODatastoresCredentialsRotation.yml) | N/A | [![.github/workflows/beam_IODatastoresCredentialsRotation.yml](https://github.com/apache/beam/actions/workflows/beam_IODatastoresCredentialsRotation.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_IODatastoresCredentialsRotation.yml?query=event%3Aschedule) | +| [ Rotate Metrics Cluster Credentials 
](https://github.com/apache/beam/actions/workflows/beam_MetricsCredentialsRotation.yml) | N/A | [![.github/workflows/beam_MetricsCredentialsRotation.yml](https://github.com/apache/beam/actions/workflows/beam_MetricsCredentialsRotation.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_MetricsCredentialsRotation.yml?query=event%3Aschedule) | diff --git a/.github/workflows/beam_Java_JMH.yml b/.github/workflows/beam_Java_JMH.yml index ccd3162873ad..7764e7223472 100644 --- a/.github/workflows/beam_Java_JMH.yml +++ b/.github/workflows/beam_Java_JMH.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 0 * * 0' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_Java_JMH.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -49,6 +49,8 @@ env: GRADLE_ENTERPRISE_CACHE_PASSWORD: ${{ secrets.GE_CACHE_PASSWORD }} INFLUXDB_HOST: http://10.128.0.96:8086 INFLUXDB_DATABASE: beam_test_metrics + INFLUXDB_USER: ${{ secrets.INFLUXDB_USER }} + INFLUXDB_USER_PASSWORD: ${{ secrets.INFLUXDB_USER_PASSWORD }} jobs: beam_Java_JMH: diff --git a/.github/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Streaming.yml b/.github/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Streaming.yml index ffb38e34a454..881d5dedaac5 100644 --- a/.github/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Streaming.yml +++ b/.github/workflows/beam_LoadTests_Java_CoGBK_Dataflow_Streaming.yml @@ -115,8 +115,8 @@ jobs: -Prunner=:runners:google-cloud-dataflow-java \ '-PloadTest.args=${{ env.beam_LoadTests_Java_CoGBK_Dataflow_Streaming_test_arguments_4 }}' \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_Metrics_Report.yml b/.github/workflows/beam_Metrics_Report.yml index b07e64b12cfc..3ed5850e5dba 100644 --- a/.github/workflows/beam_Metrics_Report.yml +++ b/.github/workflows/beam_Metrics_Report.yml @@ -74,7 +74,7 @@ jobs: INFLUXDB_USER: ${{ secrets.INFLUXDB_USER }} INFLUXDB_USER_PASSWORD: ${{ secrets.INFLUXDB_USER_PASSWORD }} - name: Archive Report - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: Metrics Report path: "${{ github.workspace }}/.test-infra/jenkins/metrics_report/beam-metrics_report.html" diff --git a/.github/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Avro.yml b/.github/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Avro.yml index 8727b2387403..8b9eefcd8052 100644 --- a/.github/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Avro.yml +++ b/.github/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Avro.yml @@ -91,8 +91,8 @@ jobs: -DintegrationTestRunner=dataflow \ -DintegrationTestPipelineOptions='[${{ env.beam_PerformanceTests_BigQueryIO_Batch_Java_Avro_test_arguments_1 }}]' \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Json.yml b/.github/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Json.yml index a231d00f5ede..1fb81b9b1431 100644 --- a/.github/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Json.yml +++ 
b/.github/workflows/beam_PerformanceTests_BigQueryIO_Batch_Java_Json.yml @@ -91,8 +91,8 @@ jobs: -DintegrationTestRunner=dataflow \ -DintegrationTestPipelineOptions='[${{ env.beam_PerformanceTests_BigQueryIO_Batch_Java_Json_test_arguments_1 }}]' \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PerformanceTests_BigQueryIO_Streaming_Java.yml b/.github/workflows/beam_PerformanceTests_BigQueryIO_Streaming_Java.yml index 7259fb1838f2..c0433043f1ed 100644 --- a/.github/workflows/beam_PerformanceTests_BigQueryIO_Streaming_Java.yml +++ b/.github/workflows/beam_PerformanceTests_BigQueryIO_Streaming_Java.yml @@ -91,8 +91,8 @@ jobs: -DintegrationTestRunner=dataflow \ -DintegrationTestPipelineOptions='[${{ env.beam_PerformanceTests_BigQueryIO_Streaming_Java_test_arguments_1 }}]' \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PerformanceTests_SQLBigQueryIO_Batch_Java.yml b/.github/workflows/beam_PerformanceTests_SQLBigQueryIO_Batch_Java.yml index c02d16688a69..dbe495f12cd1 100644 --- a/.github/workflows/beam_PerformanceTests_SQLBigQueryIO_Batch_Java.yml +++ b/.github/workflows/beam_PerformanceTests_SQLBigQueryIO_Batch_Java.yml @@ -90,8 +90,8 @@ jobs: -DintegrationTestRunner=dataflow \ '-DintegrationTestPipelineOptions=[${{env.beam_PerformanceTests_SQLBigQueryIO_Batch_Java_test_arguments_1}}]' \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml b/.github/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml index b3e606fcc390..b814e5d99924 100644 --- a/.github/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml +++ b/.github/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml @@ -104,7 +104,7 @@ jobs: -Ptest=apache_beam/examples/wordcount_it_test.py::WordCountIT::test_wordcount_it \ "-Ptest-pipeline-options=${{ env.beam_PerformanceTests_WordCountIT_PythonVersions_test_arguments_1 }}" - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Go.yml b/.github/workflows/beam_PostCommit_Go.yml index f9da27ff2ce9..821ec55e2d1b 100644 --- a/.github/workflows/beam_PostCommit_Go.yml +++ b/.github/workflows/beam_PostCommit_Go.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '30 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Go.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event diff --git a/.github/workflows/beam_PostCommit_Go_Dataflow_ARM.yml b/.github/workflows/beam_PostCommit_Go_Dataflow_ARM.yml index 532c48e7b072..615b7f1f94b2 100644 --- a/.github/workflows/beam_PostCommit_Go_Dataflow_ARM.yml +++ b/.github/workflows/beam_PostCommit_Go_Dataflow_ARM.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '30 3/6 * 
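Two changes travel together through the archive steps: the bump from `actions/upload-artifact@v3` to `v4`, and the condition change from `if: failure()` to `if: ${{ !success() }}`. The latter widens when results get archived, since `failure()` is true only after a failed step, while `!success()` also covers runs that end in cancellation. The updated step, as it appears throughout the Java workflows:

```yaml
- name: Archive JUnit Test Results
  uses: actions/upload-artifact@v4
  # !success() archives on failure and, in effect, on cancellation;
  # failure() fired only after a failed step
  if: ${{ !success() }}
  with:
    name: JUnit Test Results
    path: "**/build/reports/tests/"
```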
* *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Go_Dataflow_ARM.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event diff --git a/.github/workflows/beam_PostCommit_Go_VR_Flink.yml b/.github/workflows/beam_PostCommit_Go_VR_Flink.yml index ce1e82d22e2a..c85f0c00035a 100644 --- a/.github/workflows/beam_PostCommit_Go_VR_Flink.yml +++ b/.github/workflows/beam_PostCommit_Go_VR_Flink.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '30 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Go_VR_Flink.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event diff --git a/.github/workflows/beam_PostCommit_Go_VR_Samza.yml b/.github/workflows/beam_PostCommit_Go_VR_Samza.yml index 5d1ab6a8d13e..3ac3742500fc 100644 --- a/.github/workflows/beam_PostCommit_Go_VR_Samza.yml +++ b/.github/workflows/beam_PostCommit_Go_VR_Samza.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '30 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Go_VR_Samza.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event diff --git a/.github/workflows/beam_PostCommit_Go_VR_Spark.yml b/.github/workflows/beam_PostCommit_Go_VR_Spark.yml index ed9f50583133..d85038befe38 100644 --- a/.github/workflows/beam_PostCommit_Go_VR_Spark.yml +++ b/.github/workflows/beam_PostCommit_Go_VR_Spark.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '30 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Go_VR_Spark.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event diff --git a/.github/workflows/beam_PostCommit_Java.yml b/.github/workflows/beam_PostCommit_Java.yml index d9fa4d52f965..cc7d1c7d64ad 100644 --- a/.github/workflows/beam_PostCommit_Java.yml +++ b/.github/workflows/beam_PostCommit_Java.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '0 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -79,8 +79,8 @@ jobs: with: gradle-command: :javaPostCommit - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Avro_Versions.yml b/.github/workflows/beam_PostCommit_Java_Avro_Versions.yml index 36bdec265dd5..313f5c1340a2 100644 --- a/.github/workflows/beam_PostCommit_Java_Avro_Versions.yml +++ b/.github/workflows/beam_PostCommit_Java_Avro_Versions.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '30 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', 
'.github/trigger_files/beam_PostCommit_Java_Avro_Versions.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -79,8 +79,8 @@ jobs: with: gradle-command: :javaAvroVersionsTest - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_BigQueryEarlyRollout.yml b/.github/workflows/beam_PostCommit_Java_BigQueryEarlyRollout.yml index f21f301cc265..f832c5fab562 100644 --- a/.github/workflows/beam_PostCommit_Java_BigQueryEarlyRollout.yml +++ b/.github/workflows/beam_PostCommit_Java_BigQueryEarlyRollout.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '30 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_BigQueryEarlyRollout.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -86,8 +86,8 @@ jobs: with: gradle-command: :sdks:java:io:google-cloud-platform:bigQueryEarlyRolloutIntegrationTest - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_DataflowV1.yml b/.github/workflows/beam_PostCommit_Java_DataflowV1.yml index 68c901c2e527..6e31094fa468 100644 --- a/.github/workflows/beam_PostCommit_Java_DataflowV1.yml +++ b/.github/workflows/beam_PostCommit_Java_DataflowV1.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '30 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_DataflowV1.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -83,8 +83,8 @@ jobs: with: gradle-command: :runners:google-cloud-dataflow-java:postCommit - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_DataflowV2.yml b/.github/workflows/beam_PostCommit_Java_DataflowV2.yml index 90a97296df77..58ecc37361d4 100644 --- a/.github/workflows/beam_PostCommit_Java_DataflowV2.yml +++ b/.github/workflows/beam_PostCommit_Java_DataflowV2.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '30 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_DataflowV2.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -79,8 +79,8 @@ jobs: with: gradle-command: :runners:google-cloud-dataflow-java:postCommitRunnerV2 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Examples_Dataflow.yml b/.github/workflows/beam_PostCommit_Java_Examples_Dataflow.yml index 488031a9244a..183a55a8e742 100644 --- a/.github/workflows/beam_PostCommit_Java_Examples_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_Java_Examples_Dataflow.yml @@ -21,7 
+21,7 @@ on: schedule: - cron: '45 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Examples_Dataflow.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -80,8 +80,8 @@ jobs: gradle-command: :runners:google-cloud-dataflow-java:examples:javaPostCommit max-workers: 12 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_ARM.yml b/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_ARM.yml index 26ea487e0dcf..33557dc6d2d1 100644 --- a/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_ARM.yml +++ b/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_ARM.yml @@ -23,7 +23,7 @@ on: schedule: - cron: '45 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Examples_Dataflow_ARM.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -113,8 +113,8 @@ jobs: -PdisableCheckStyle=true \ -PskipCheckerFramework \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_Java.yml b/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_Java.yml index 747c9f0983fc..f9e828106bf5 100644 --- a/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_Java.yml +++ b/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_Java.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '45 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Examples_Dataflow_Java.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -86,8 +86,8 @@ jobs: gradle-command: :runners:google-cloud-dataflow-java:examples:java${{ matrix.java_version }}PostCommit max-workers: 12 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_V2.yml b/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_V2.yml index ae50ec506dff..661dd2dde2b1 100644 --- a/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_V2.yml +++ b/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_V2.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '45 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Examples_Dataflow_V2.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -80,8 +80,8 @@ jobs: gradle-command: :runners:google-cloud-dataflow-java:examplesJavaRunnerV2IntegrationTest max-workers: 12 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test 
Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_V2_Java.yml b/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_V2_Java.yml index 1ed3cf5eb981..18fa9513ac63 100644 --- a/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_V2_Java.yml +++ b/.github/workflows/beam_PostCommit_Java_Examples_Dataflow_V2_Java.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '45 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Examples_Dataflow_V2_Java.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -94,8 +94,8 @@ jobs: -PtestJavaVersion=${{ matrix.java_version }} \ -Pjava${{ matrix.java_version }}Home=$JAVA_HOME_${{ matrix.java_version }}_X64 \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Examples_Direct.yml b/.github/workflows/beam_PostCommit_Java_Examples_Direct.yml index fa7e76942808..34d17b4b20a9 100644 --- a/.github/workflows/beam_PostCommit_Java_Examples_Direct.yml +++ b/.github/workflows/beam_PostCommit_Java_Examples_Direct.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '45 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Examples_Direct.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -81,8 +81,8 @@ jobs: with: gradle-command: :runners:direct:examplesIntegrationTest - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Examples_Flink.yml b/.github/workflows/beam_PostCommit_Java_Examples_Flink.yml index 8e7a99f12ac8..3131422fe732 100644 --- a/.github/workflows/beam_PostCommit_Java_Examples_Flink.yml +++ b/.github/workflows/beam_PostCommit_Java_Examples_Flink.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '45 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Examples_Flink.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -82,8 +82,8 @@ jobs: with: gradle-command: :runners:flink:1.15:examplesIntegrationTest - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Examples_Spark.yml b/.github/workflows/beam_PostCommit_Java_Examples_Spark.yml index f927fa8b3b46..4bc8408f9a4d 100644 --- a/.github/workflows/beam_PostCommit_Java_Examples_Spark.yml +++ b/.github/workflows/beam_PostCommit_Java_Examples_Spark.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '45 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Examples_Spark.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ 
-81,8 +81,8 @@ jobs: with: gradle-command: :runners:spark:3:examplesIntegrationTest - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Hadoop_Versions.yml b/.github/workflows/beam_PostCommit_Java_Hadoop_Versions.yml index c0c85fb0cd31..a227144e6570 100644 --- a/.github/workflows/beam_PostCommit_Java_Hadoop_Versions.yml +++ b/.github/workflows/beam_PostCommit_Java_Hadoop_Versions.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '45 3/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Hadoop_Versions.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -79,8 +79,8 @@ jobs: with: gradle-command: :javaHadoopVersionsTest - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_IO_Performance_Tests.yml b/.github/workflows/beam_PostCommit_Java_IO_Performance_Tests.yml index 4bbc23b13de4..588da4ea61a2 100644 --- a/.github/workflows/beam_PostCommit_Java_IO_Performance_Tests.yml +++ b/.github/workflows/beam_PostCommit_Java_IO_Performance_Tests.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '0 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_IO_Performance_Tests.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -107,8 +107,8 @@ jobs: exportDataset: performance_tests exportTable: io_performance_metrics_test - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_InfluxDbIO_IT.yml b/.github/workflows/beam_PostCommit_Java_InfluxDbIO_IT.yml index c1f75be3851a..0bfbdedadcca 100644 --- a/.github/workflows/beam_PostCommit_Java_InfluxDbIO_IT.yml +++ b/.github/workflows/beam_PostCommit_Java_InfluxDbIO_IT.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '0 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_InfluxDbIO_IT.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs diff --git a/.github/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java11.yml b/.github/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java11.yml index 2449054a1073..0f39e0d02537 100644 --- a/.github/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java11.yml +++ b/.github/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java11.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Jpms_Dataflow_Java11.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions 
which are `write-all` in case of pull_request_target event @@ -80,8 +80,8 @@ jobs: gradle-command: :sdks:java:testing:jpms-tests:dataflowRunnerIntegrationTest arguments: -Dorg.gradle.java.home=$JAVA_HOME_11_X64 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java17.yml b/.github/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java17.yml index 611c8a9d31de..049b5e27e341 100644 --- a/.github/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java17.yml +++ b/.github/workflows/beam_PostCommit_Java_Jpms_Dataflow_Java17.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Jpms_Dataflow_Java17.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -85,8 +85,8 @@ jobs: -PtestJavaVersion=17 -Pjava17Home=$JAVA_HOME_17_X64 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java11.yml b/.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java11.yml index 6fdff9c451eb..73750453e5e3 100644 --- a/.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java11.yml +++ b/.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java11.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Jpms_Direct_Java11.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -80,8 +80,8 @@ jobs: gradle-command: :sdks:java:testing:jpms-tests:directRunnerIntegrationTest arguments: -Dorg.gradle.java.home=$JAVA_HOME_11_X64 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java17.yml b/.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java17.yml index 3f62861eb025..2a56f3e96201 100644 --- a/.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java17.yml +++ b/.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java17.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Jpms_Direct_Java17.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -85,8 +85,8 @@ jobs: -PtestJavaVersion=17 -Pjava17Home=$JAVA_HOME_17_X64 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java21.yml 
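The JPMS post-commits pin the JDK explicitly rather than relying on the runner default: the Java 11 variants point Gradle's own JVM at `$JAVA_HOME_11_X64` via `-Dorg.gradle.java.home`, while the Java 17 and 21 variants keep Gradle on the default JVM and route only the tests through `-PtestJavaVersion` plus a matching `-Pjava<N>Home` property (the Java 21 workflow also moves from `actions/setup-java@v3.11.0` to `@v4`). A sketch of the Java 17 direct-runner invocation assembled from the hunks above; the step name is illustrative:

```yaml
- name: run PostCommit Java Jpms Direct Java17 script
  uses: ./.github/actions/gradle-command-self-hosted-action
  with:
    gradle-command: :sdks:java:testing:jpms-tests:directRunnerIntegrationTest
    arguments: |
      # tests run on JDK 17 while Gradle itself stays on the default JVM
      -PtestJavaVersion=17 -Pjava17Home=$JAVA_HOME_17_X64
```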
b/.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java21.yml index e1926a430069..163169563047 100644 --- a/.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java21.yml +++ b/.github/workflows/beam_PostCommit_Java_Jpms_Direct_Java21.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 */6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Jpms_Direct_Java21.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -71,7 +71,7 @@ jobs: github_token: ${{ secrets.GITHUB_TOKEN }} github_job: ${{ matrix.job_name }} (${{ matrix.job_phrase }}) - name: Set up Java - uses: actions/setup-java@v3.11.0 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: | @@ -86,8 +86,8 @@ jobs: -PtestJavaVersion=21 -Pjava21Home=$JAVA_HOME_21_X64 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Jpms_Flink_Java11.yml b/.github/workflows/beam_PostCommit_Java_Jpms_Flink_Java11.yml index fbc0d16e4994..ea8a89def8ee 100644 --- a/.github/workflows/beam_PostCommit_Java_Jpms_Flink_Java11.yml +++ b/.github/workflows/beam_PostCommit_Java_Jpms_Flink_Java11.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Jpms_Flink_Java11.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -80,8 +80,8 @@ jobs: gradle-command: :sdks:java:testing:jpms-tests:flinkRunnerIntegrationTest arguments: -Dorg.gradle.java.home=$JAVA_HOME_11_X64 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Jpms_Spark_Java11.yml b/.github/workflows/beam_PostCommit_Java_Jpms_Spark_Java11.yml index 2c2e2acc6f19..a8754ac254d8 100644 --- a/.github/workflows/beam_PostCommit_Java_Jpms_Spark_Java11.yml +++ b/.github/workflows/beam_PostCommit_Java_Jpms_Spark_Java11.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Jpms_Spark_Java11.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -80,8 +80,8 @@ jobs: gradle-command: :sdks:java:testing:jpms-tests:sparkRunnerIntegrationTest arguments: -Dorg.gradle.java.home=$JAVA_HOME_11_X64 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow.yml b/.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow.yml index f4e96961061e..ab4c63e772ea 100644 --- a/.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow.yml +++ 
b/.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '15 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Nexmark_Dataflow.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -72,6 +72,8 @@ env: --enforceEncodability=true --enforceImmutability=true --runner=DataflowRunner + INFLUXDB_USER: ${{ secrets.INFLUXDB_USER }} + INFLUXDB_USER_PASSWORD: ${{ secrets.INFLUXDB_USER_PASSWORD }} jobs: beam_PostCommit_Java_Nexmark_Dataflow: diff --git a/.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2.yml b/.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2.yml index 875f54ea7632..63321d8aa6c1 100644 --- a/.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2.yml +++ b/.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '15 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Nexmark_Dataflow_V2.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -73,6 +73,8 @@ env: --enforceEncodability=true --enforceImmutability=true --runner=DataflowRunner + INFLUXDB_USER: ${{ secrets.INFLUXDB_USER }} + INFLUXDB_USER_PASSWORD: ${{ secrets.INFLUXDB_USER_PASSWORD }} jobs: beam_PostCommit_Java_Nexmark_Dataflow_V2: diff --git a/.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.yml b/.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.yml index ba3f8bb61005..1a4c38c46914 100644 --- a/.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.yml +++ b/.github/workflows/beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '0 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Nexmark_Dataflow_V2_Java.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -72,6 +72,8 @@ env: --enforceEncodability=true --enforceImmutability=true --runner=DataflowRunner + INFLUXDB_USER: ${{ secrets.INFLUXDB_USER }} + INFLUXDB_USER_PASSWORD: ${{ secrets.INFLUXDB_USER_PASSWORD }} jobs: beam_PostCommit_Java_Nexmark_Dataflow_V2_Java: diff --git a/.github/workflows/beam_PostCommit_Java_Nexmark_Direct.yml b/.github/workflows/beam_PostCommit_Java_Nexmark_Direct.yml index 23d766c89823..5e1975b9ae83 100644 --- a/.github/workflows/beam_PostCommit_Java_Nexmark_Direct.yml +++ b/.github/workflows/beam_PostCommit_Java_Nexmark_Direct.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '15 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Nexmark_Direct.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -67,6 +67,8 @@ env: --enforceEncodability=true --enforceImmutability=true --runner=DirectRunner + INFLUXDB_USER: ${{ secrets.INFLUXDB_USER }} + INFLUXDB_USER_PASSWORD: ${{ secrets.INFLUXDB_USER_PASSWORD }} jobs: beam_PostCommit_Java_Nexmark_Direct: diff --git a/.github/workflows/beam_PostCommit_Java_Nexmark_Flink.yml b/.github/workflows/beam_PostCommit_Java_Nexmark_Flink.yml index dd87fec145a4..405a6399d2e8 100644 --- 
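The benchmark workflows (JMH, Nexmark, Tpcds) all gain the same pair of workflow-level environment variables so their harnesses can authenticate against the metrics InfluxDB; the host and the `beam_test_metrics` database are already configured in `beam_Java_JMH.yml`, and the credentials come from repository secrets rather than being inlined:

```yaml
env:
  # credentials resolved from repository secrets at run time, never committed
  INFLUXDB_USER: ${{ secrets.INFLUXDB_USER }}
  INFLUXDB_USER_PASSWORD: ${{ secrets.INFLUXDB_USER_PASSWORD }}
```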
a/.github/workflows/beam_PostCommit_Java_Nexmark_Flink.yml +++ b/.github/workflows/beam_PostCommit_Java_Nexmark_Flink.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '15 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Nexmark_Flink.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -66,6 +66,8 @@ env: --suite=SMOKE --streamTimeout=60 --runner=FlinkRunner + INFLUXDB_USER: ${{ secrets.INFLUXDB_USER }} + INFLUXDB_USER_PASSWORD: ${{ secrets.INFLUXDB_USER_PASSWORD }} jobs: beam_PostCommit_Java_Nexmark_Flink: diff --git a/.github/workflows/beam_PostCommit_Java_Nexmark_Spark.yml b/.github/workflows/beam_PostCommit_Java_Nexmark_Spark.yml index 05229fb4c89b..29291f441ce0 100644 --- a/.github/workflows/beam_PostCommit_Java_Nexmark_Spark.yml +++ b/.github/workflows/beam_PostCommit_Java_Nexmark_Spark.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '15 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Nexmark_Spark.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -66,6 +66,8 @@ env: --suite=SMOKE --streamTimeout=60 --streaming=false + INFLUXDB_USER: ${{ secrets.INFLUXDB_USER }} + INFLUXDB_USER_PASSWORD: ${{ secrets.INFLUXDB_USER_PASSWORD }} jobs: beam_PostCommit_Java_Nexmark_Spark: diff --git a/.github/workflows/beam_PostCommit_Java_PVR_Flink_Streaming.yml b/.github/workflows/beam_PostCommit_Java_PVR_Flink_Streaming.yml index cffe074f58eb..13d3384f8087 100644 --- a/.github/workflows/beam_PostCommit_Java_PVR_Flink_Streaming.yml +++ b/.github/workflows/beam_PostCommit_Java_PVR_Flink_Streaming.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '15 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_PVR_Flink_Streaming.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -79,8 +79,8 @@ jobs: with: gradle-command: runners:flink:1.15:job-server:validatesPortableRunnerStreaming - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_PVR_Samza.yml b/.github/workflows/beam_PostCommit_Java_PVR_Samza.yml index 729e95fe5219..86647b6079d3 100644 --- a/.github/workflows/beam_PostCommit_Java_PVR_Samza.yml +++ b/.github/workflows/beam_PostCommit_Java_PVR_Samza.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '15 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_PVR_Samza.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -81,8 +81,8 @@ jobs: with: gradle-command: :runners:samza:job-server:validatesPortableRunner - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_PVR_Spark3_Streaming.yml b/.github/workflows/beam_PostCommit_Java_PVR_Spark3_Streaming.yml index 
d3508e968fe0..58ee88e20ec3 100644 --- a/.github/workflows/beam_PostCommit_Java_PVR_Spark3_Streaming.yml +++ b/.github/workflows/beam_PostCommit_Java_PVR_Spark3_Streaming.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '15 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_PVR_Spark3_Streaming.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -79,8 +79,8 @@ jobs: with: gradle-command: :runners:spark:3:job-server:validatesPortableRunnerStreaming - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_PVR_Spark_Batch.yml b/.github/workflows/beam_PostCommit_Java_PVR_Spark_Batch.yml index 8cc977ddea82..a6061da45e92 100644 --- a/.github/workflows/beam_PostCommit_Java_PVR_Spark_Batch.yml +++ b/.github/workflows/beam_PostCommit_Java_PVR_Spark_Batch.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '15 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_PVR_Spark_Batch.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -82,8 +82,8 @@ jobs: :runners:spark:3:job-server:validatesPortableRunnerBatch \ :runners:spark:3:job-server:validatesPortableRunnerDocker \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -96,7 +96,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: SpotBugs Results path: "**/build/reports/spotbugs/*.html" diff --git a/.github/workflows/beam_PostCommit_Java_Sickbay.yml b/.github/workflows/beam_PostCommit_Java_Sickbay.yml index e6de4b2538b5..2e96ae63382c 100644 --- a/.github/workflows/beam_PostCommit_Java_Sickbay.yml +++ b/.github/workflows/beam_PostCommit_Java_Sickbay.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '30 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Sickbay.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -79,8 +79,8 @@ jobs: with: gradle-command: :javaPostCommitSickbay - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_SingleStoreIO_IT.yml b/.github/workflows/beam_PostCommit_Java_SingleStoreIO_IT.yml index 2fff2365d817..ddbbfa3e5d42 100644 --- a/.github/workflows/beam_PostCommit_Java_SingleStoreIO_IT.yml +++ b/.github/workflows/beam_PostCommit_Java_SingleStoreIO_IT.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '30 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_SingleStoreIO_IT.json'] workflow_dispatch: #Setting 
explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event permissions: diff --git a/.github/workflows/beam_PostCommit_Java_Tpcds_Dataflow.yml b/.github/workflows/beam_PostCommit_Java_Tpcds_Dataflow.yml index cc04e5e3b493..9704b7f03d61 100644 --- a/.github/workflows/beam_PostCommit_Java_Tpcds_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_Java_Tpcds_Dataflow.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '30 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Tpcds_Dataflow.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -57,7 +57,9 @@ env: --sourceType=PARQUET --dataDirectory=gs://beam-tpcds/datasets/parquet/nonpartitioned --resultsDirectory=gs://beam-tpcds/results/dataflow/ - --tpcParallel=1 + --tpcParallel=1 + INFLUXDB_USER: ${{ secrets.INFLUXDB_USER }} + INFLUXDB_USER_PASSWORD: ${{ secrets.INFLUXDB_USER_PASSWORD }} tpcdsBigQueryArgs: | --bigQueryTable=tpcds --bigQueryDataset=tpcds diff --git a/.github/workflows/beam_PostCommit_Java_Tpcds_Flink.yml b/.github/workflows/beam_PostCommit_Java_Tpcds_Flink.yml index 55f7b65c9b5d..820a4c9792cc 100644 --- a/.github/workflows/beam_PostCommit_Java_Tpcds_Flink.yml +++ b/.github/workflows/beam_PostCommit_Java_Tpcds_Flink.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '30 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Tpcds_Flink.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -55,6 +55,8 @@ env: --dataDirectory=gs://beam-tpcds/datasets/parquet/nonpartitioned --resultsDirectory=gs://beam-tpcds/results/flink/ --tpcParallel=1 + INFLUXDB_USER: ${{ secrets.INFLUXDB_USER }} + INFLUXDB_USER_PASSWORD: ${{ secrets.INFLUXDB_USER_PASSWORD }} tpcdsBigQueryArgs: | --bigQueryTable=tpcds --bigQueryDataset=tpcds diff --git a/.github/workflows/beam_PostCommit_Java_Tpcds_Spark.yml b/.github/workflows/beam_PostCommit_Java_Tpcds_Spark.yml index 8bf21480f621..f39feaf64a1b 100644 --- a/.github/workflows/beam_PostCommit_Java_Tpcds_Spark.yml +++ b/.github/workflows/beam_PostCommit_Java_Tpcds_Spark.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '30 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_Tpcds_Spark.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -48,12 +48,13 @@ env: GRADLE_ENTERPRISE_CACHE_USERNAME: ${{ secrets.GE_CACHE_USERNAME }} GRADLE_ENTERPRISE_CACHE_PASSWORD: ${{ secrets.GE_CACHE_PASSWORD }} GRADLE_COMMAND_ARGUMENTS: | - --runner=SparkRunner --dataSize=1GB --sourceType=PARQUET --dataDirectory=gs://beam-tpcds/datasets/parquet/nonpartitioned --resultsDirectory=gs://beam-tpcds/results/spark3-rdd/ --tpcParallel=1 + INFLUXDB_USER: ${{ secrets.INFLUXDB_USER }} + INFLUXDB_USER_PASSWORD: ${{ secrets.INFLUXDB_USER_PASSWORD }} tpcdsBigQueryArgs: | --bigQueryTable=tpcds --bigQueryDataset=tpcds @@ -78,11 +79,12 @@ jobs: github.event.comment.body == 'Run Spark Runner Tpcds Tests' runs-on: [self-hosted, ubuntu-20.04, 
main] timeout-minutes: 240 - name: ${{ matrix.job_name }} (${{ matrix.job_phrase }}) + name: ${{ matrix.job_name }} (${{ matrix.job_phrase }}) ${{ matrix.runner }} strategy: matrix: job_name: ["beam_PostCommit_Java_Tpcds_Spark"] job_phrase: ["Run Spark Runner Tpcds Tests"] + runner: [SparkRunner, SparkStructuredStreamingRunner] steps: - uses: actions/checkout@v4 - name: Setup repository @@ -90,13 +92,13 @@ jobs: with: comment_phrase: ${{ matrix.job_phrase }} github_token: ${{ secrets.GITHUB_TOKEN }} - github_job: ${{ matrix.job_name }} (${{ matrix.job_phrase }}) + github_job: ${{ matrix.job_name }} (${{ matrix.job_phrase }}) ${{ matrix.runner }} - name: Setup environment uses: ./.github/actions/setup-environment-action - - name: run PostCommit Java Tpcds Spark script + - name: run PostCommit Java Tpcds Spark (${{ matrix.runner }}) script uses: ./.github/actions/gradle-command-self-hosted-action with: gradle-command: :sdks:java:testing:tpcds:run arguments: | -Ptpcds.runner=:runners:spark:3 \ - "-Ptpcds.args=${{env.tpcdsBigQueryArgs}} ${{env.tpcdsInfluxDBArgs}} ${{ env.GRADLE_COMMAND_ARGUMENTS }} --queries=${{env.tpcdsQueriesArg}}" \ + "-Ptpcds.args=${{env.tpcdsBigQueryArgs}} ${{env.tpcdsInfluxDBArgs}} ${{ env.GRADLE_COMMAND_ARGUMENTS }} --runner=${{ matrix.runner }} --queries=${{env.tpcdsQueriesArg}}" \ diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow.yml index 338b148ff3fb..d34847d5340f 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow.yml @@ -19,9 +19,9 @@ name: PostCommit Java ValidatesRunner Dataflow on: schedule: - - cron: '30 4/6 * * *' + - cron: '30 4/8 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Dataflow.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -82,8 +82,8 @@ jobs: gradle-command: :runners:google-cloud-dataflow-java:validatesRunner max-workers: 12 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.yml index 86b6b32a4abb..91a55dc14007 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.yml @@ -19,9 +19,9 @@ name: PostCommit Java ValidatesRunner Dataflow JavaVersions on: schedule: - - cron: '30 4/6 * * *' + - cron: '30 4/8 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Dataflow_JavaVersions.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -100,8 +100,8 @@ jobs: -Dorg.gradle.java.home=$JAVA_HOME_${{ matrix.java_version }}_X64 \ max-workers: 12 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/"
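Note on the recurring paths change in these hunks: every suite now watches a dedicated JSON file under .github/trigger_files/ in addition to release/trigger_all_tests.json, so a single suite can be exercised from an open PR by touching only its own trigger file, while the release-wide file still fans out to all suites at once. A condensed sketch of the pattern (the inline comments are added for illustration; the body of the JSON file is not interpreted by the workflow, so hypothetically any trivial edit, e.g. {"comment": "run"}, re-triggers the suite):

  pull_request_target:
    # a PR touching either path queues this suite: the per-suite file scopes
    # the run to one workflow, the release file triggers everything at once
    paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Dataflow.json']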
diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.yml index abe21ac3f783..e10a4db9abf3 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.yml @@ -19,9 +19,9 @@ name: PostCommit Java ValidatesRunner Dataflow Streaming on: schedule: - - cron: '30 4/6 * * *' + - cron: '30 4/8 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -82,8 +82,8 @@ jobs: gradle-command: :runners:google-cloud-dataflow-java:validatesRunnerStreaming max-workers: 12 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.yml index 63625b48ea2f..adb5c9f9c777 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '30 6/8 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -82,8 +82,8 @@ jobs: gradle-command: :runners:google-cloud-dataflow-java:validatesRunnerV2 max-workers: 12 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.yml index b372c4c2acda..2024de57a1c6 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '30 6/8 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -82,8 +82,8 @@ jobs: gradle-command: :runners:google-cloud-dataflow-java:validatesRunnerV2Streaming max-workers: 12 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Direct.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Direct.yml index 16e21c9faa26..1d6eb82d9045 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Direct.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Direct.yml @@ -21,7 +21,7 @@ on:
schedule: - cron: '45 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Direct.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -79,8 +79,8 @@ jobs: - name: run validatesRunner script run: ./gradlew :runners:direct-java:validatesRunner - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.yml index fafc821a2b20..da9ab6419ec6 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '30 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Direct_JavaVersions.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -95,8 +95,8 @@ jobs: -x compileTestJava \ -Dorg.gradle.java.home=$JAVA_HOME_${{ matrix.java_version }}_X64 \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Flink.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Flink.yml index 8171760f5528..22fd277470c8 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Flink.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Flink.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '45 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Flink.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -80,8 +80,8 @@ jobs: with: gradle-command: :runners:flink:1.15:validatesRunner - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Flink_Java11.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Flink_Java11.yml index 6bc1b0ffa03c..764211e26484 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Flink_Java11.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Flink_Java11.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '45 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Flink_Java11.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -98,8 +98,8 @@ jobs: -Dorg.gradle.java.home=$JAVA_HOME_11_X64 \ max-workers: 12 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + 
uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Samza.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Samza.yml index 8f8993f3108e..76136e8038d2 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Samza.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Samza.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '45 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Samza.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -79,8 +79,8 @@ jobs: with: gradle-command: :runners:samza:validatesRunner - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Spark.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Spark.yml index ce67510b1e50..b4e4e25fa4bd 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Spark.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Spark.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '45 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Spark.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -79,8 +79,8 @@ jobs: with: gradle-command: :runners:spark:3:validatesRunner - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.yml index 7883218d262a..55e3c8a9ea7a 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '45 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -79,8 +79,8 @@ jobs: with: gradle-command: :runners:spark:3:validatesStructuredStreamingRunnerBatch - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Spark_Java11.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Spark_Java11.yml index 099f9ab1e724..510c797fef36 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Spark_Java11.yml +++ 
b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Spark_Java11.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '45 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Spark_Java11.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -98,8 +98,8 @@ jobs: -Dorg.gradle.java.home=$JAVA_HOME_11_X64 \ max-workers: 12 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Twister2.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Twister2.yml index de3d4914052e..d3c80473dd2e 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Twister2.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_Twister2.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '45 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_Twister2.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -79,8 +79,8 @@ jobs: with: gradle-command: :runners:twister2:validatesRunner - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_ULR.yml b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_ULR.yml index eb2139c562e9..35d5ac738d64 100644 --- a/.github/workflows/beam_PostCommit_Java_ValidatesRunner_ULR.yml +++ b/.github/workflows/beam_PostCommit_Java_ValidatesRunner_ULR.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '45 4/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Java_ValidatesRunner_ULR.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -78,8 +78,8 @@ jobs: - name: run ulrLoopbackValidatesRunner script run: ./gradlew :runners:portability:java:ulrLoopbackValidatesRunner - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Javadoc.yml b/.github/workflows/beam_PostCommit_Javadoc.yml index 8e2a170dba63..61af8b12d2a4 100644 --- a/.github/workflows/beam_PostCommit_Javadoc.yml +++ b/.github/workflows/beam_PostCommit_Javadoc.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '0 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Javadoc.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -84,7 +84,7 @@ jobs: path-to-root: sdks/java/javadoc/build/docs/javadoc base-url-path: https://beam.apache.org/releases/javadoc/current/ - name: Upload Javadoc Results - uses: actions/upload-artifact@v3 + 
uses: actions/upload-artifact@v4 with: name: Javadoc Results path: '**/sdks/java/javadoc/build/docs/javadoc/**' \ No newline at end of file diff --git a/.github/workflows/beam_PostCommit_PortableJar_Flink.yml b/.github/workflows/beam_PostCommit_PortableJar_Flink.yml index 2f066979681e..2c0a2f0c49e9 100644 --- a/.github/workflows/beam_PostCommit_PortableJar_Flink.yml +++ b/.github/workflows/beam_PostCommit_PortableJar_Flink.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_PortableJar_Flink.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -83,7 +83,7 @@ jobs: arguments: | -PpythonVersion=3.8 \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_PortableJar_Spark.yml b/.github/workflows/beam_PostCommit_PortableJar_Spark.yml index 8b5bc031f7fa..d15c846cf501 100644 --- a/.github/workflows/beam_PostCommit_PortableJar_Spark.yml +++ b/.github/workflows/beam_PostCommit_PortableJar_Spark.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_PortableJar_Spark.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -83,7 +83,7 @@ jobs: arguments: | -PpythonVersion=3.8 \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python.yml b/.github/workflows/beam_PostCommit_Python.yml index 0b22466f517b..80226779d4af 100644 --- a/.github/workflows/beam_PostCommit_Python.yml +++ b/.github/workflows/beam_PostCommit_Python.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '30 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -98,10 +98,10 @@ jobs: env: CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}} - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: - name: Python Test Results + name: Python ${{ matrix.python_version }} Test Results path: '**/pytest*.xml' - name: Publish Python Test Results uses: EnricoMi/publish-unit-test-result-action@v2 diff --git a/.github/workflows/beam_PostCommit_Python_Arm.yml b/.github/workflows/beam_PostCommit_Python_Arm.yml index 1a88c468a67c..7bd8d9cccc55 100644 --- a/.github/workflows/beam_PostCommit_Python_Arm.yml +++ b/.github/workflows/beam_PostCommit_Python_Arm.yml @@ -23,7 +23,7 @@ on: schedule: - cron: '0 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_Arm.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -114,10 +114,10 @@ jobs: MULTIARCH_TAG: ${{ steps.set_tag.outputs.TAG }} USER: github-actions - 
name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: - name: Python Test Results + name: Python ${{ matrix.python_version }} Test Results path: '**/pytest*.xml' - name: Publish Python Test Results uses: EnricoMi/publish-unit-test-result-action@v2 diff --git a/.github/workflows/beam_PostCommit_Python_Examples_Dataflow.yml b/.github/workflows/beam_PostCommit_Python_Examples_Dataflow.yml index ae07cb0273e0..fa106bbe8a8f 100644 --- a/.github/workflows/beam_PostCommit_Python_Examples_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_Python_Examples_Dataflow.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_Examples_Dataflow.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -83,7 +83,7 @@ jobs: -PuseWheelDistribution \ -PpythonVersion=3.11 \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python_Examples_Direct.yml b/.github/workflows/beam_PostCommit_Python_Examples_Direct.yml index b4b620e5dd1f..1d778c4cad47 100644 --- a/.github/workflows/beam_PostCommit_Python_Examples_Direct.yml +++ b/.github/workflows/beam_PostCommit_Python_Examples_Direct.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_Examples_Direct.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -90,7 +90,7 @@ jobs: arguments: | -PpythonVersion=${{ matrix.python_version }} \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python_Examples_Flink.yml b/.github/workflows/beam_PostCommit_Python_Examples_Flink.yml index 1f334f6a9963..f7bc434a2055 100644 --- a/.github/workflows/beam_PostCommit_Python_Examples_Flink.yml +++ b/.github/workflows/beam_PostCommit_Python_Examples_Flink.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_Examples_Flink.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -90,7 +90,7 @@ jobs: arguments: | -PpythonVersion=${{ matrix.python_version }} \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python_Examples_Spark.yml b/.github/workflows/beam_PostCommit_Python_Examples_Spark.yml index 6a33c63f24a0..0ea616635257 100644 --- a/.github/workflows/beam_PostCommit_Python_Examples_Spark.yml +++ b/.github/workflows/beam_PostCommit_Python_Examples_Spark.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '0 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: 
['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_Examples_Spark.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -90,7 +90,7 @@ jobs: arguments: | -PpythonVersion=${{ matrix.python_version }} \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml b/.github/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml index 1c8daf4cffa3..49b337a8d6ae 100644 --- a/.github/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml +++ b/.github/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '15 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_MongoDBIO_IT.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -82,7 +82,7 @@ jobs: arguments: | -PpythonVersion=3.11 \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python_Nexmark_Direct.yml b/.github/workflows/beam_PostCommit_Python_Nexmark_Direct.yml index f63461369be0..2ce1db22e102 100644 --- a/.github/workflows/beam_PostCommit_Python_Nexmark_Direct.yml +++ b/.github/workflows/beam_PostCommit_Python_Nexmark_Direct.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '15 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_Nexmark_Direct.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs diff --git a/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml b/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml index 196dd1eaa84e..bb3bae69806c 100644 --- a/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '15 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_ValidatesContainer_Dataflow.json'] workflow_dispatch: issue_comment: types: [created] @@ -96,7 +96,7 @@ jobs: arguments: | -PpythonVersion=${{ matrix.python_version }} \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml b/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml index ac2179dd2521..c13df624e7af 100644 --- a/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml +++ b/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '15 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', 
'.github/trigger_files/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -95,7 +95,7 @@ jobs: -PtestRCDependencies=true \ -PpythonVersion=${{ matrix.python_version }} \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml index 79bc303f1117..55e3a80e93ee 100644 --- a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '15 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_ValidatesRunner_Dataflow.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -98,7 +98,7 @@ jobs: -PuseWheelDistribution \ -PpythonVersion=${{ matrix.python_version }} \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml index b403f76b9f9e..adff5bd94535 100644 --- a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml +++ b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '15 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_ValidatesRunner_Flink.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -92,7 +92,7 @@ jobs: arguments: | -PpythonVersion=${{ matrix.python_version }} \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml index 4229304278c0..286d294e3e5a 100644 --- a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml +++ b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '15 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_ValidatesRunner_Samza.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -90,7 +90,7 @@ jobs: arguments: | -PpythonVersion=${{ matrix.python_version }} \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml 
b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml index 06db87f8fb76..1324325ab70f 100644 --- a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml +++ b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '15 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_ValidatesRunner_Spark.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -90,7 +90,7 @@ jobs: arguments: | -PpythonVersion=${{ matrix.python_version }} \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml b/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml index dd899a538e9a..e5a2c1b39134 100644 --- a/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '15 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_Xlang_Gcp_Dataflow.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -82,7 +82,7 @@ jobs: gradle-command: :sdks:python:test-suites:dataflow:gcpCrossLanguagePostCommit arguments: -PuseWheelDistribution - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml b/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml index 33eb748a2f84..d4e57997da70 100644 --- a/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml +++ b/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '30 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_Xlang_Gcp_Direct.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -81,7 +81,7 @@ jobs: with: gradle-command: :sdks:python:test-suites:direct:gcpCrossLanguagePostCommit - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml b/.github/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml index 4d71e507fe32..66402fad0b8d 100644 --- a/.github/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '30 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Python_Xlang_IO_Dataflow.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target 
event @@ -84,7 +84,7 @@ jobs: -PuseWheelDistribution \ -PkafkaBootstrapServer=10.128.0.40:9094,10.128.0.28:9094,10.128.0.165:9094 \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_SQL.yml b/.github/workflows/beam_PostCommit_SQL.yml index eae7d4374d5b..4466633552fb 100644 --- a/.github/workflows/beam_PostCommit_SQL.yml +++ b/.github/workflows/beam_PostCommit_SQL.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '30 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_SQL.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -79,8 +79,8 @@ jobs: with: gradle-command: :sqlPostCommit - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_Sickbay_Python.yml b/.github/workflows/beam_PostCommit_Sickbay_Python.yml index b4d054f07a3d..7ebdbcb38416 100644 --- a/.github/workflows/beam_PostCommit_Sickbay_Python.yml +++ b/.github/workflows/beam_PostCommit_Sickbay_Python.yml @@ -21,7 +21,7 @@ on: schedule: - cron: '0 8 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Sickbay_Python.json'] workflow_dispatch: # This allows a subsequently queued workflow run to interrupt previous runs @@ -93,7 +93,7 @@ jobs: arguments: | -PpythonVersion=${{ matrix.python_version }} \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_TransformService_Direct.yml b/.github/workflows/beam_PostCommit_TransformService_Direct.yml index e40112f0c5b1..0cb4352c2fbb 100644 --- a/.github/workflows/beam_PostCommit_TransformService_Direct.yml +++ b/.github/workflows/beam_PostCommit_TransformService_Direct.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '30 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_TransformService_Direct.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -88,7 +88,7 @@ jobs: -PuseWheelDistribution \ -PpythonVersion=${{ matrix.python_version }} \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_Website_Test.yml b/.github/workflows/beam_PostCommit_Website_Test.yml index ba372d223281..95e49faf13a5 100644 --- a/.github/workflows/beam_PostCommit_Website_Test.yml +++ b/.github/workflows/beam_PostCommit_Website_Test.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '30 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_Website_Test.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event
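A note on the two `if` conditions used by the archive steps in these hunks: `failure()` is true only when an earlier step in the job has failed, whereas `!success()` is also true when the job is cancelled (roughly `failure() || cancelled()`), so test reports still get uploaded for cancelled runs. A sketch of the upgraded step as it appears throughout this change, with an explanatory comment added:

  - name: Archive JUnit Test Results
    uses: actions/upload-artifact@v4
    # upload on failure or cancellation; plain `if: failure()` skips the cancelled case
    if: ${{ !success() }}
    with:
      name: JUnit Test Results
      path: "**/build/reports/tests/"

Relatedly, upload-artifact@v4 makes artifact names immutable within a workflow run, which is presumably why the Python matrix jobs above rename their artifact to include ${{ matrix.python_version }}.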
diff --git a/.github/workflows/beam_PostCommit_XVR_Direct.yml b/.github/workflows/beam_PostCommit_XVR_Direct.yml index ee90f9176f6d..de1b995bc649 100644 --- a/.github/workflows/beam_PostCommit_XVR_Direct.yml +++ b/.github/workflows/beam_PostCommit_XVR_Direct.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '30 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_XVR_Direct.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -98,8 +98,8 @@ jobs: -PpythonVersion=${{ matrix.python_version }} \ -PskipNonPythonTask=false \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_XVR_Flink.yml b/.github/workflows/beam_PostCommit_XVR_Flink.yml index 00c756a714f3..134efdf0dfc3 100644 --- a/.github/workflows/beam_PostCommit_XVR_Flink.yml +++ b/.github/workflows/beam_PostCommit_XVR_Flink.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '30 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_XVR_Flink.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -99,8 +99,8 @@ jobs: -PpythonVersion=${{ matrix.python_version }} \ -PskipNonPythonTask=false \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml b/.github/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml index aab8a0e0a84f..82d284a829ad 100644 --- a/.github/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_XVR_GoUsingJava_Dataflow.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '45 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_XVR_GoUsingJava_Dataflow.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -74,6 +74,11 @@ jobs: uses: ./.github/actions/setup-environment-action with: python-version: 3.8 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: GCloud Docker credential helper + run: | + gcloud auth configure-docker us.gcr.io - name: run XVR GoUsingJava Dataflow script env: USER: github-actions @@ -82,8 +87,8 @@ jobs: with: gradle-command: :runners:google-cloud-dataflow-java:validatesCrossLanguageRunnerGoUsingJava - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -93,4 +98,4 @@ jobs: with: commit: '${{ env.prsha || env.GITHUB_SHA }}' comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} - files: '**/build/test-results/**/*.xml' \ No newline at end of file + files: '**/build/test-results/**/*.xml'
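The XVR GoUsingJava job above gains two container-related setup steps before the Gradle run. `gcloud auth configure-docker us.gcr.io` registers gcloud as Docker's credential helper for that registry (it writes a credHelpers entry into ~/.docker/config.json), so later image pulls and pushes from the test authenticate as the job's service account; the Buildx step provisions a builder for subsequent docker builds. A minimal sketch of the added steps, with comments added for illustration (they are not in the workflow itself):

  - name: Set up Docker Buildx
    # provisions a buildx builder usable by later docker build invocations
    uses: docker/setup-buildx-action@v1
  - name: GCloud Docker credential helper
    run: |
      # route Docker auth for us.gcr.io through the job's gcloud credentials
      gcloud auth configure-docker us.gcr.io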
diff --git a/.github/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml b/.github/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml index 113c51626078..02024805beb8 100644 --- a/.github/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '45 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_XVR_JavaUsingPython_Dataflow.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -86,8 +86,8 @@ jobs: arguments: | -PpythonVersion=${{ matrix.python_version }} \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml b/.github/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml index 6759930d5de8..4908b34da18e 100644 --- a/.github/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '45 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -83,7 +83,7 @@ jobs: arguments: | -PpythonVersion=3.11 \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml b/.github/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml index dda068049a17..a4dd9a9abe1e 100644 --- a/.github/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '45 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_XVR_PythonUsingJava_Dataflow.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -86,7 +86,7 @@ jobs: arguments: | -PpythonVersion=${{ matrix.python_version }} \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PostCommit_XVR_Samza.yml b/.github/workflows/beam_PostCommit_XVR_Samza.yml index b05b588e0cf8..b56872dd778b 100644 --- a/.github/workflows/beam_PostCommit_XVR_Samza.yml +++ b/.github/workflows/beam_PostCommit_XVR_Samza.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '45 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_XVR_Samza.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of
pull_request_target event @@ -98,8 +98,8 @@ jobs: -PpythonVersion=${{ matrix.python_version }} \ -PskipNonPythonTask=false \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PostCommit_XVR_Spark3.yml b/.github/workflows/beam_PostCommit_XVR_Spark3.yml index 0742196a6960..68f9cb93b5f6 100644 --- a/.github/workflows/beam_PostCommit_XVR_Spark3.yml +++ b/.github/workflows/beam_PostCommit_XVR_Spark3.yml @@ -19,7 +19,7 @@ on: schedule: - cron: '45 5/6 * * *' pull_request_target: - paths: ['release/trigger_all_tests.json'] + paths: ['release/trigger_all_tests.json', '.github/trigger_files/beam_PostCommit_XVR_Spark3.json'] workflow_dispatch: #Setting explicit permissions for the action to avoid the default permissions which are `write-all` in case of pull_request_target event @@ -98,8 +98,8 @@ jobs: -PpythonVersion=${{ matrix.python_version }} \ -PskipNonPythonTask=false \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PreCommit_CommunityMetrics.yml b/.github/workflows/beam_PreCommit_CommunityMetrics.yml index 570dd5dc3354..778383a7f7db 100644 --- a/.github/workflows/beam_PreCommit_CommunityMetrics.yml +++ b/.github/workflows/beam_PreCommit_CommunityMetrics.yml @@ -22,7 +22,7 @@ on: paths: ['.test-infra/metrics/**', 'buildSrc/build.gradle.kts', '.github/workflows/beam_PreCommit_CommunityMetrics.yml'] pull_request_target: branches: ['master', 'release-*'] - paths: ['.test-infra/metrics/**', 'buildSrc/build.gradle.kts', 'release/trigger_all_tests.json'] + paths: ['.test-infra/metrics/**', 'buildSrc/build.gradle.kts', 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_CommunityMetrics.json'] issue_comment: types: [created] schedule: diff --git a/.github/workflows/beam_PreCommit_GHA.yml b/.github/workflows/beam_PreCommit_GHA.yml index 94811bc0f507..33edb12a14d5 100644 --- a/.github/workflows/beam_PreCommit_GHA.yml +++ b/.github/workflows/beam_PreCommit_GHA.yml @@ -22,7 +22,7 @@ on: paths: ['.github/**/*.yml'] pull_request_target: branches: ['master', 'release-*' ] - paths: ['.github/**/*.yml', 'release/trigger_all_tests.json'] + paths: ['.github/**/*.yml', 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_GHA.json'] issue_comment: types: [created] schedule: diff --git a/.github/workflows/beam_PreCommit_Go.yml b/.github/workflows/beam_PreCommit_Go.yml index 7df1296ca124..2e268e21684e 100644 --- a/.github/workflows/beam_PreCommit_Go.yml +++ b/.github/workflows/beam_PreCommit_Go.yml @@ -22,7 +22,7 @@ on: paths: ['model/**', 'sdks/go.mod', 'sdks/go.sum', 'sdks/go/**', 'release/**', '.github/workflows/beam_PreCommit_Go.yml'] pull_request_target: branches: ['master', 'release-*'] - paths: ['model/**', 'sdks/go.mod', 'sdks/go.sum', 'sdks/go/**', 'release/**', 'release/trigger_all_tests.json'] + paths: ['model/**', 'sdks/go.mod', 'sdks/go.sum', 'sdks/go/**', 'release/**', 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_Go.json'] issue_comment: types: [created] schedule:
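For the PreCommit suites above, the pull_request_target trigger pairs a branch filter with a paths filter: a run fires only when a PR into master or a release-* branch touches one of the listed paths, and because the event is pull_request_target, the workflow definition and secrets come from the base branch rather than the PR head, which is also why these files set explicit permissions instead of the event's default write-all token. Condensed from the beam_PreCommit_Go hunk above, with comments added for illustration:

  pull_request_target:
    branches: ['master', 'release-*']
    # the new per-suite trigger file joins the SDK sources and the
    # release-wide trigger file as a way to queue this precommit from a PR
    paths: ['model/**', 'sdks/go.mod', 'sdks/go.sum', 'sdks/go/**', 'release/**', 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_Go.json']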
diff --git a/.github/workflows/beam_PreCommit_GoPortable.yml b/.github/workflows/beam_PreCommit_GoPortable.yml index 609340e8ded3..8d62e73d5b25 100644 --- a/.github/workflows/beam_PreCommit_GoPortable.yml +++ b/.github/workflows/beam_PreCommit_GoPortable.yml @@ -22,7 +22,7 @@ on: paths: ['model/**', 'sdks/go.mod', 'sdks/go.sum', 'sdks/go/**', 'release/**', '.github/workflows/beam_PreCommit_GoPortable.yml'] pull_request_target: branches: ['master', 'release-*'] - paths: ['model/**', 'sdks/go.mod', 'sdks/go.sum', 'sdks/go/**', 'release/**', 'release/trigger_all_tests.json'] + paths: ['model/**', 'sdks/go.mod', 'sdks/go.sum', 'sdks/go/**', 'release/**', 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_GoPortable.json'] issue_comment: types: [created] schedule: diff --git a/.github/workflows/beam_PreCommit_GoPrism.yml b/.github/workflows/beam_PreCommit_GoPrism.yml index 52c5553cd623..4af064f45c42 100644 --- a/.github/workflows/beam_PreCommit_GoPrism.yml +++ b/.github/workflows/beam_PreCommit_GoPrism.yml @@ -22,7 +22,7 @@ on: paths: ['model/**', 'sdks/go.mod', 'sdks/go.sum', 'sdks/go/**', 'release/**', '.github/workflows/beam_PreCommit_GoPrism.yml'] pull_request_target: branches: ['master', 'release-*'] - paths: ['model/**', 'sdks/go.mod', 'sdks/go.sum', 'sdks/go/**', 'release/**', 'release/trigger_all_tests.json'] + paths: ['model/**', 'sdks/go.mod', 'sdks/go.sum', 'sdks/go/**', 'release/**', 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_GoPrism.json'] issue_comment: types: [created] schedule: diff --git a/.github/workflows/beam_PreCommit_ItFramework.yml b/.github/workflows/beam_PreCommit_ItFramework.yml index cf4cd2980176..a883330140ba 100644 --- a/.github/workflows/beam_PreCommit_ItFramework.yml +++ b/.github/workflows/beam_PreCommit_ItFramework.yml @@ -29,6 +29,7 @@ on: paths: - 'it/**' - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_ItFramework.json' issue_comment: types: [created] schedule: @@ -89,8 +90,8 @@ jobs: - name: run ItFrameworkPrecommit script run: ./gradlew -p it build - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PreCommit_Java.yml b/.github/workflows/beam_PreCommit_Java.yml index fe694b4eab70..1614e9e64c05 100644 --- a/.github/workflows/beam_PreCommit_Java.yml +++ b/.github/workflows/beam_PreCommit_Java.yml @@ -26,7 +26,6 @@ on: - 'examples/kotlin/**' - 'release/**' - '.github/workflows/beam_PreCommit_Java.yml' - - '!sdks/java/extensions/avro/**' - '!sdks/java/extensions/sql/**' - '!sdks/java/io/amazon-web-services/**' - '!sdks/java/io/amazon-web-services2/**' @@ -78,7 +77,7 @@ on: - 'examples/kotlin/**' - 'release/**' - 'release/trigger_all_tests.json' - - '!sdks/java/extensions/avro/**' + - '.github/trigger_files/beam_PreCommit_Java.json' - '!sdks/java/extensions/sql/**' - '!sdks/java/io/amazon-web-services/**' - '!sdks/java/io/amazon-web-services2/**' @@ -154,13 +153,13 @@ env: jobs: beam_PreCommit_Java: name: ${{ matrix.job_name }} (${{ matrix.job_phrase }}) - timeout-minutes: 180 runs-on: [self-hosted, ubuntu-20.04, main] strategy: fail-fast: false matrix: job_name: [beam_PreCommit_Java] job_phrase: [Run Java PreCommit] + timeout-minutes: 120 if: | github.event_name == 'push' || github.event_name == 'pull_request_target' || @@ -185,8 +184,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }}
with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -198,7 +197,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Amazon-Web-Services2_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Amazon-Web-Services2_IO_Direct.yml index 16d6562f827f..f8af89a14ef2 100644 --- a/.github/workflows/beam_PreCommit_Java_Amazon-Web-Services2_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Amazon-Web-Services2_IO_Direct.yml @@ -38,6 +38,7 @@ on: - "sdks/java/io/common/**" - "sdks/java/core/src/main/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Amazon-Web-Services2_IO_Direct.json' - "build.gradle" - "buildSrc/**" - "gradle/**" @@ -117,8 +118,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -130,7 +131,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Amazon-Web-Services_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Amazon-Web-Services_IO_Direct.yml index f7c1d66e2f1e..a693902c8eb3 100644 --- a/.github/workflows/beam_PreCommit_Java_Amazon-Web-Services_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Amazon-Web-Services_IO_Direct.yml @@ -38,6 +38,7 @@ on: - "sdks/java/io/common/**" - "sdks/java/core/src/main/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Amazon-Web-Services_IO_Direct.json' - "build.gradle" - "buildSrc/**" - "gradle/**" @@ -117,8 +118,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -130,7 +131,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Amqp_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Amqp_IO_Direct.yml index 8df097d8428f..4a56321bfe9d 100644 --- a/.github/workflows/beam_PreCommit_Java_Amqp_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Amqp_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/amqp/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Amqp_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ 
github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Azure_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Azure_IO_Direct.yml index d32361708491..94b7c39b950b 100644 --- a/.github/workflows/beam_PreCommit_Java_Azure_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Azure_IO_Direct.yml @@ -38,6 +38,7 @@ on: - "sdks/java/io/common/**" - "sdks/java/core/src/main/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Azure_IO_Direct.json' - "build.gradle" - "buildSrc/**" - "gradle/**" @@ -110,8 +111,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -123,7 +124,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Cassandra_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Cassandra_IO_Direct.yml index 04e1b10c23bd..daea023283a1 100644 --- a/.github/workflows/beam_PreCommit_Java_Cassandra_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Cassandra_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/cassandra/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Cassandra_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Cdap_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Cdap_IO_Direct.yml index f23aad3ea455..daa275086330 100644 --- a/.github/workflows/beam_PreCommit_Java_Cdap_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Cdap_IO_Direct.yml @@ -31,6 +31,7 @@ on: - "sdks/java/io/hadoop-common/**" - "sdks/java/io/hadoop-format/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Cdap_IO_Direct.json' issue_comment: types: [created] schedule: @@ -96,8 +97,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -109,7 +110,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: 
SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Clickhouse_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Clickhouse_IO_Direct.yml index f7c5ea1b8615..4ddac57a165b 100644 --- a/.github/workflows/beam_PreCommit_Java_Clickhouse_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Clickhouse_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/clickhouse/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Clickhouse_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Csv_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Csv_IO_Direct.yml index 496e0c15ea66..e1b381d5633e 100644 --- a/.github/workflows/beam_PreCommit_Java_Csv_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Csv_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/csv/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Csv_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Debezium_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Debezium_IO_Direct.yml index 97264ac146a5..5ca6b8afa68e 100644 --- a/.github/workflows/beam_PreCommit_Java_Debezium_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Debezium_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/debezium/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Debezium_IO_Direct.json' issue_comment: types: [created] schedule: @@ -101,8 +102,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -114,7 +115,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_ElasticSearch_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_ElasticSearch_IO_Direct.yml index f51e50aa8200..a6a17cebdef9 100644 --- a/.github/workflows/beam_PreCommit_Java_ElasticSearch_IO_Direct.yml +++ 
b/.github/workflows/beam_PreCommit_Java_ElasticSearch_IO_Direct.yml @@ -29,6 +29,7 @@ on: - "sdks/java/io/elasticsearch/**" - "sdks/java/io/elasticsearch-tests/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_ElasticSearch_IO_Direct.json' issue_comment: types: [created] schedule: @@ -105,8 +106,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -118,7 +119,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Examples_Dataflow.yml b/.github/workflows/beam_PreCommit_Java_Examples_Dataflow.yml index b96e459470cf..b7f762ca808c 100644 --- a/.github/workflows/beam_PreCommit_Java_Examples_Dataflow.yml +++ b/.github/workflows/beam_PreCommit_Java_Examples_Dataflow.yml @@ -39,6 +39,7 @@ on: - 'examples/kotlin/**' - 'release/**' - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Examples_Dataflow.json' issue_comment: types: [created] schedule: @@ -73,7 +74,7 @@ permissions: jobs: beam_PreCommit_Java_Examples_Dataflow: name: ${{ matrix.job_name }} (${{ matrix.job_phrase }}) - timeout-minutes: 60 + timeout-minutes: 75 runs-on: [self-hosted, ubuntu-20.04, main] strategy: matrix: @@ -112,8 +113,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PreCommit_Java_Examples_Dataflow_Java21.yml b/.github/workflows/beam_PreCommit_Java_Examples_Dataflow_Java21.yml index f482e22ec4a2..bd4f98cd8714 100644 --- a/.github/workflows/beam_PreCommit_Java_Examples_Dataflow_Java21.yml +++ b/.github/workflows/beam_PreCommit_Java_Examples_Dataflow_Java21.yml @@ -37,6 +37,7 @@ on: - 'examples/kotlin/**' - 'release/**' - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Examples_Dataflow_Java21.json' issue_comment: types: [created] schedule: @@ -76,7 +77,7 @@ jobs: matrix: job_name: ["beam_PreCommit_Java_Examples_Dataflow_Java21"] job_phrase: ["Run Java_Examples_Dataflow_Java21 PreCommit"] - timeout-minutes: 60 + timeout-minutes: 75 if: | github.event_name == 'push' || github.event_name == 'pull_request_target' || @@ -120,8 +121,8 @@ jobs: -Pjava21Home=$JAVA_HOME_21_X64 \ max-workers: 12 - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -133,7 +134,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: SpotBugs Results path: '**/build/reports/spotbugs/*.html' \ No newline at end of file diff --git a/.github/workflows/beam_PreCommit_Java_File-schema-transform_IO_Direct.yml 
b/.github/workflows/beam_PreCommit_Java_File-schema-transform_IO_Direct.yml index 8ce77fef6416..7f467abc2749 100644 --- a/.github/workflows/beam_PreCommit_Java_File-schema-transform_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_File-schema-transform_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/file-schema-transform/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_File-schema-transform_IO_Direct.json' issue_comment: types: [created] schedule: @@ -93,8 +94,8 @@ jobs: -PdisableCheckStyle=true \ -Dfile.encoding=UTF-8 \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -106,7 +107,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Flink_Versions.yml b/.github/workflows/beam_PreCommit_Java_Flink_Versions.yml index d5c244b2dbc9..b7ae39a1ee88 100644 --- a/.github/workflows/beam_PreCommit_Java_Flink_Versions.yml +++ b/.github/workflows/beam_PreCommit_Java_Flink_Versions.yml @@ -31,6 +31,7 @@ on: - 'runners/flink/**' - 'release/**' - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Flink_Versions.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: arguments: | -PdisableSpotlessCheck=true -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PreCommit_Java_GCP_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_GCP_IO_Direct.yml index 73dd9d63d872..eec0383c02a9 100644 --- a/.github/workflows/beam_PreCommit_Java_GCP_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_GCP_IO_Direct.yml @@ -37,6 +37,7 @@ on: - "runners/core-construction-java/**" - "runners/core-java/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_GCP_IO_Direct.json' - "sdks/java/core/src/main/**" - "sdks/java/extensions/arrow/**" - "sdks/java/extensions/google-cloud-platform-core/**" @@ -114,8 +115,8 @@ jobs: -PdisableCheckStyle=true \ -PenableJacocoReport \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -127,7 +128,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Google-ads_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Google-ads_IO_Direct.yml index 7e0ca73e7efd..2ddce986fc8e 100644 --- a/.github/workflows/beam_PreCommit_Java_Google-ads_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Google-ads_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/google-ads/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Google-ads_IO_Direct.json' issue_comment: 
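The pattern in all of these path hunks is the same: alongside `release/trigger_all_tests.json`, which fires every suite at once, each workflow now watches its own file under `.github/trigger_files/`, so a PR can re-run exactly one suite by touching (or bumping a counter in) that JSON file. A hedged sketch of the resulting trigger stanza for a hypothetical `beam_PreCommit_Foo` workflow:

on:
  pull_request_target:
    branches: ['master', 'release-*']
    paths:
      - 'sdks/java/io/foo/**'                            # code under test (illustrative)
      - 'release/trigger_all_tests.json'                 # fires every suite
      - '.github/trigger_files/beam_PreCommit_Foo.json'  # fires only this suite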
types: [created] schedule: @@ -90,8 +91,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -103,7 +104,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_HBase_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_HBase_IO_Direct.yml index 805561f3475d..52c91a26aa3e 100644 --- a/.github/workflows/beam_PreCommit_Java_HBase_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_HBase_IO_Direct.yml @@ -29,6 +29,7 @@ on: - "sdks/java/io/hbase/**" - "sdks/java/io/hadoop-common/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_HBase_IO_Direct.json' issue_comment: types: [created] schedule: @@ -94,8 +95,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -107,7 +108,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_HCatalog_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_HCatalog_IO_Direct.yml index bb9e5ee31f27..97096108ebd1 100644 --- a/.github/workflows/beam_PreCommit_Java_HCatalog_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_HCatalog_IO_Direct.yml @@ -29,6 +29,7 @@ on: - "sdks/java/io/hcatalog/**" - "sdks/java/io/hadoop-common/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_HCatalog_IO_Direct.json' issue_comment: types: [created] schedule: @@ -94,8 +95,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -107,7 +108,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Hadoop_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Hadoop_IO_Direct.yml index ea09a8df70d6..604c72471fa0 100644 --- a/.github/workflows/beam_PreCommit_Java_Hadoop_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Hadoop_IO_Direct.yml @@ -53,6 +53,7 @@ on: - "sdks/java/io/hadoop-common/**" - "sdks/java/io/hadoop-format/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Hadoop_IO_Direct.json' issue_comment: types: [created] schedule: @@ -132,8 +133,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: 
actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -145,7 +146,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_IOs_Direct.yml b/.github/workflows/beam_PreCommit_Java_IOs_Direct.yml index 4a221a911009..831a1bec53fa 100644 --- a/.github/workflows/beam_PreCommit_Java_IOs_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_IOs_Direct.yml @@ -29,6 +29,7 @@ on: - "sdks/java/io/common/**" - "sdks/java/core/src/main/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_IOs_Direct.json' issue_comment: types: [created] workflow_dispatch: @@ -93,8 +94,8 @@ jobs: -PdisableCheckStyle=true \ -Dfile.encoding=UTF-8 \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -106,7 +107,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_InfluxDb_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_InfluxDb_IO_Direct.yml index 52a03ceea204..80869d74b642 100644 --- a/.github/workflows/beam_PreCommit_Java_InfluxDb_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_InfluxDb_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/influxdb/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_InfluxDb_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_JDBC_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_JDBC_IO_Direct.yml index f328dd513c88..9a11d4e7ef35 100644 --- a/.github/workflows/beam_PreCommit_Java_JDBC_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_JDBC_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/jdbc/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_JDBC_IO_Direct.json' issue_comment: types: [created] schedule: @@ -99,8 +100,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -112,7 +113,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: 
actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Jms_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Jms_IO_Direct.yml index 9abd63bb79c0..8f502223e85e 100644 --- a/.github/workflows/beam_PreCommit_Java_Jms_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Jms_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/jms/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Jms_IO_Direct.json' issue_comment: types: [created] schedule: @@ -99,8 +100,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -112,7 +113,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Kafka_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Kafka_IO_Direct.yml index c73c384c7881..ac63a39380b2 100644 --- a/.github/workflows/beam_PreCommit_Java_Kafka_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Kafka_IO_Direct.yml @@ -35,6 +35,7 @@ on: - "sdks/java/io/synthetic/**" - "sdks/java/io/expansion-service/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Kafka_IO_Direct.json' issue_comment: types: [created] schedule: @@ -101,8 +102,8 @@ jobs: -PdisableCheckStyle=true \ --no-parallel \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -114,7 +115,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Kinesis_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Kinesis_IO_Direct.yml index 2aa4d4c10fd8..f76fec0bdd91 100644 --- a/.github/workflows/beam_PreCommit_Java_Kinesis_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Kinesis_IO_Direct.yml @@ -45,6 +45,7 @@ on: - "gradle.bat" - "settings.gradle.kts" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Kinesis_IO_Direct.json' issue_comment: types: [created] schedule: @@ -124,8 +125,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -137,7 +138,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Kudu_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Kudu_IO_Direct.yml index 25f90072db5c..8948bedf1384 100644 --- 
a/.github/workflows/beam_PreCommit_Java_Kudu_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Kudu_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/kudu/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Kudu_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_MongoDb_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_MongoDb_IO_Direct.yml index 34fa18a23c71..f0647f93c52b 100644 --- a/.github/workflows/beam_PreCommit_Java_MongoDb_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_MongoDb_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/mongodb/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_MongoDb_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Mqtt_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Mqtt_IO_Direct.yml index 0b26f28e8f69..17c4127cb40d 100644 --- a/.github/workflows/beam_PreCommit_Java_Mqtt_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Mqtt_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/mqtt/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Mqtt_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Neo4j_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Neo4j_IO_Direct.yml index ed6d6baf456b..a4e0ac744f41 100644 --- a/.github/workflows/beam_PreCommit_Java_Neo4j_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Neo4j_IO_Direct.yml @@ -29,6 +29,7 @@ on: - "sdks/java/io/neo4j/**" - "sdks/java/testing/test-utils/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Neo4j_IO_Direct.json' 
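One consequence of the `actions/upload-artifact` v3-to-v4 bump repeated in these hunks: v4 artifacts are immutable, and their names must be unique within a workflow run, whereas v3 silently merged files uploaded under the same name. That is why the matrixed Python workflows further down also rename their artifact to include the Python version. A sketch of a matrix-safe upload; the versions shown are illustrative:

strategy:
  matrix:
    python_version: ['3.9', '3.11']
steps:
  - name: Archive Python Test Results
    uses: actions/upload-artifact@v4
    if: failure()
    with:
      # v4 rejects a second artifact with the same name, so the matrix
      # leg has to be baked into it
      name: Python ${{ matrix.python_version }} Test Results
      path: '**/pytest*.xml'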
issue_comment: types: [created] schedule: @@ -101,8 +102,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -114,7 +115,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_PVR_Flink_Batch.yml b/.github/workflows/beam_PreCommit_Java_PVR_Flink_Batch.yml index 0ff7785650de..2cadc893fbc1 100644 --- a/.github/workflows/beam_PreCommit_Java_PVR_Flink_Batch.yml +++ b/.github/workflows/beam_PreCommit_Java_PVR_Flink_Batch.yml @@ -33,6 +33,7 @@ on: - 'runners/java-fn-execution/**' - 'sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/**' - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_PVR_Flink_Batch.json' issue_comment: types: [created] schedule: @@ -97,13 +98,13 @@ jobs: env: CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH }} - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" - name: Upload test report - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: java-code-coverage-report path: "**/build/test-results/**/*.xml" diff --git a/.github/workflows/beam_PreCommit_Java_PVR_Flink_Docker.yml b/.github/workflows/beam_PreCommit_Java_PVR_Flink_Docker.yml index 841c6dbdcbab..ae62fd93d977 100644 --- a/.github/workflows/beam_PreCommit_Java_PVR_Flink_Docker.yml +++ b/.github/workflows/beam_PreCommit_Java_PVR_Flink_Docker.yml @@ -37,6 +37,7 @@ on: - 'runners/flink/**' - 'runners/java-fn-execution/**' - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_PVR_Flink_Docker.json' issue_comment: types: [created] schedule: @@ -102,8 +103,8 @@ jobs: env: CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}} - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PreCommit_Java_Parquet_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Parquet_IO_Direct.yml index 998a6bbbac04..e2430ae1bef9 100644 --- a/.github/workflows/beam_PreCommit_Java_Parquet_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Parquet_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/parquet/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Parquet_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: 
SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Pulsar_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Pulsar_IO_Direct.yml index 00becbfd7ee6..dd3402c79773 100644 --- a/.github/workflows/beam_PreCommit_Java_Pulsar_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Pulsar_IO_Direct.yml @@ -38,6 +38,7 @@ on: - "sdks/java/io/common/**" - "sdks/java/core/src/main/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Pulsar_IO_Direct.json' - "build.gradle" - "buildSrc/**" - "gradle/**" @@ -110,8 +111,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -123,7 +124,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_RabbitMq_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_RabbitMq_IO_Direct.yml index 7adbfe74bab5..f796f842311f 100644 --- a/.github/workflows/beam_PreCommit_Java_RabbitMq_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_RabbitMq_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/rabbitmq/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_RabbitMq_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Redis_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Redis_IO_Direct.yml index 255df740a57f..8e2483357cc6 100644 --- a/.github/workflows/beam_PreCommit_Java_Redis_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Redis_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/redis/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Redis_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_RequestResponse_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_RequestResponse_IO_Direct.yml index cfe721c1e948..a2fabd3debd0 100644 --- 
a/.github/workflows/beam_PreCommit_Java_RequestResponse_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_RequestResponse_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/rrio/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_RequestResponse_IO_Direct.json' issue_comment: types: [created] schedule: @@ -90,8 +91,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -103,7 +104,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_SingleStore_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_SingleStore_IO_Direct.yml index 733921da7ad2..620ab5ccec17 100644 --- a/.github/workflows/beam_PreCommit_Java_SingleStore_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_SingleStore_IO_Direct.yml @@ -29,6 +29,7 @@ on: - "sdks/java/io/singlestore/**" - "sdks/java/testing/test-utils/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_SingleStore_IO_Direct.json' issue_comment: types: [created] schedule: @@ -94,8 +95,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -107,7 +108,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Snowflake_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Snowflake_IO_Direct.yml index c84f0026b726..8a3041bccbfb 100644 --- a/.github/workflows/beam_PreCommit_Java_Snowflake_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Snowflake_IO_Direct.yml @@ -31,6 +31,7 @@ on: - "sdks/java/extensions/google-cloud-platform-core/**" - "sdks/java/testing/test-utils/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Snowflake_IO_Direct.json' issue_comment: types: [created] schedule: @@ -103,8 +104,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -116,7 +117,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Solr_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Solr_IO_Direct.yml index f3142b41dd4d..2071f894de8c 100644 --- a/.github/workflows/beam_PreCommit_Java_Solr_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Solr_IO_Direct.yml @@ -27,6 +27,7 @@ on: 
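A note on the `comment_mode` value repeated throughout these hunks: GitHub Actions expressions have no ternary operator, so `A && 'x' || 'y'` is the standard idiom for "x if A else y" (with the caveat that it falls through to 'y' whenever 'x' is itself falsy, which cannot happen with these non-empty string literals). In context, the publisher comments on the PR only when the run was requested via a trigger phrase:

- name: Publish JUnit Test Results
  uses: EnricoMi/publish-unit-test-result-action@v2
  with:
    # comment only for issue_comment-triggered (trigger phrase) runs
    comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }}
    files: '**/build/test-results/**/*.xml'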
paths: - "sdks/java/io/solr/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Solr_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Spark3_Versions.yml b/.github/workflows/beam_PreCommit_Java_Spark3_Versions.yml index 9665e9770bc0..d22cd8cad7e6 100644 --- a/.github/workflows/beam_PreCommit_Java_Spark3_Versions.yml +++ b/.github/workflows/beam_PreCommit_Java_Spark3_Versions.yml @@ -29,6 +29,7 @@ on: paths: - 'runners/spark/**' - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Spark3_Versions.json' issue_comment: types: [created] schedule: @@ -101,8 +102,8 @@ jobs: gradle-command: :runners:spark:3:sparkVersionsTest arguments: -PdisableSpotlessCheck=true - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" diff --git a/.github/workflows/beam_PreCommit_Java_Splunk_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Splunk_IO_Direct.yml index 7032cc62a62a..0106ae61ab8f 100644 --- a/.github/workflows/beam_PreCommit_Java_Splunk_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Splunk_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/splunk/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Splunk_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Thrift_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Thrift_IO_Direct.yml index da26e226e105..442eda5a8bc8 100644 --- a/.github/workflows/beam_PreCommit_Java_Thrift_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Thrift_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/thrift/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Thrift_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 
'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Java_Tika_IO_Direct.yml b/.github/workflows/beam_PreCommit_Java_Tika_IO_Direct.yml index 20eff37521e2..407ea3218288 100644 --- a/.github/workflows/beam_PreCommit_Java_Tika_IO_Direct.yml +++ b/.github/workflows/beam_PreCommit_Java_Tika_IO_Direct.yml @@ -27,6 +27,7 @@ on: paths: - "sdks/java/io/tika/**" - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Java_Tika_IO_Direct.json' issue_comment: types: [created] schedule: @@ -92,8 +93,8 @@ jobs: -PdisableSpotlessCheck=true \ -PdisableCheckStyle=true \ - name: Archive JUnit Test Results - uses: actions/upload-artifact@v3 - if: failure() + uses: actions/upload-artifact@v4 + if: ${{ !success() }} with: name: JUnit Test Results path: "**/build/reports/tests/" @@ -105,7 +106,7 @@ jobs: comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }} files: '**/build/test-results/**/*.xml' - name: Archive SpotBugs Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() with: name: SpotBugs Results diff --git a/.github/workflows/beam_PreCommit_Kotlin_Examples.yml b/.github/workflows/beam_PreCommit_Kotlin_Examples.yml index 13d533442575..2a804bb9de09 100644 --- a/.github/workflows/beam_PreCommit_Kotlin_Examples.yml +++ b/.github/workflows/beam_PreCommit_Kotlin_Examples.yml @@ -39,6 +39,7 @@ on: - 'examples/kotlin/**' - 'release/**' - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Kotlin_Examples.json' issue_comment: types: [created] schedule: diff --git a/.github/workflows/beam_PreCommit_Portable_Python.yml b/.github/workflows/beam_PreCommit_Portable_Python.yml index 169bdb74a649..2fa4905ca736 100644 --- a/.github/workflows/beam_PreCommit_Portable_Python.yml +++ b/.github/workflows/beam_PreCommit_Portable_Python.yml @@ -43,6 +43,7 @@ on: - 'sdks/python/**' - 'release/**' - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Portable_Python.json' issue_comment: types: [created] schedule: diff --git a/.github/workflows/beam_PreCommit_Python.yml b/.github/workflows/beam_PreCommit_Python.yml index 670f41875ffb..cb7660e04245 100644 --- a/.github/workflows/beam_PreCommit_Python.yml +++ b/.github/workflows/beam_PreCommit_Python.yml @@ -17,7 +17,7 @@ name: PreCommit Python on: pull_request_target: branches: [ "master", "release-*" ] - paths: [ "model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json'] + paths: [ "model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_Python.json'] issue_comment: types: [created] push: @@ -99,10 +99,10 @@ jobs: -PpythonVersion=${{ matrix.python_version }} \ -PuseWheelDistribution - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: - name: Python Test Results + name: Python ${{ matrix.python_version }} Test Results path: '**/pytest*.xml' - name: Publish Python Test Results uses: EnricoMi/publish-unit-test-result-action@v2 diff --git a/.github/workflows/beam_PreCommit_PythonDocker.yml b/.github/workflows/beam_PreCommit_PythonDocker.yml index 26ecf97c9b52..754fbac95d0d 100644 --- a/.github/workflows/beam_PreCommit_PythonDocker.yml +++ b/.github/workflows/beam_PreCommit_PythonDocker.yml @@ -17,7 +17,7 @@ name: PreCommit 
Python Docker on: pull_request_target: branches: [ "master", "release-*" ] - paths: [ "model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json'] + paths: [ "model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_PythonDocker.json'] issue_comment: types: [created] push: diff --git a/.github/workflows/beam_PreCommit_PythonDocs.yml b/.github/workflows/beam_PreCommit_PythonDocs.yml index 2ada89147321..708aa669efda 100644 --- a/.github/workflows/beam_PreCommit_PythonDocs.yml +++ b/.github/workflows/beam_PreCommit_PythonDocs.yml @@ -18,7 +18,7 @@ name: PreCommit Python Docs on: pull_request_target: branches: [ "master", "release-*" ] - paths: ["sdks/python/**", 'release/trigger_all_tests.json'] + paths: ["sdks/python/**", 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_PythonDocs.json'] issue_comment: types: [created] push: diff --git a/.github/workflows/beam_PreCommit_PythonFormatter.yml b/.github/workflows/beam_PreCommit_PythonFormatter.yml index 17d9cd855138..8eaf99ba83a3 100644 --- a/.github/workflows/beam_PreCommit_PythonFormatter.yml +++ b/.github/workflows/beam_PreCommit_PythonFormatter.yml @@ -17,7 +17,7 @@ name: PreCommit Python Formatter on: pull_request_target: branches: [ "master", "release-*" ] - paths: [ "sdks/python/apache_beam/**", 'release/trigger_all_tests.json'] + paths: [ "sdks/python/apache_beam/**", 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_PythonFormatter.json'] issue_comment: types: [created] push: diff --git a/.github/workflows/beam_PreCommit_PythonLint.yml b/.github/workflows/beam_PreCommit_PythonLint.yml index 583c484e8e93..2cc469170d57 100644 --- a/.github/workflows/beam_PreCommit_PythonLint.yml +++ b/.github/workflows/beam_PreCommit_PythonLint.yml @@ -17,7 +17,7 @@ name: PreCommit Python Lint on: pull_request_target: branches: [ "master", "release-*" ] - paths: ["sdks/python/**","release/**", 'release/trigger_all_tests.json'] + paths: ["sdks/python/**","release/**", 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_PythonLint.json'] issue_comment: types: [created] push: diff --git a/.github/workflows/beam_PreCommit_Python_Coverage.yml b/.github/workflows/beam_PreCommit_Python_Coverage.yml index f0444013df59..244f37cc208c 100644 --- a/.github/workflows/beam_PreCommit_Python_Coverage.yml +++ b/.github/workflows/beam_PreCommit_Python_Coverage.yml @@ -17,7 +17,7 @@ name: PreCommit Python Coverage on: pull_request_target: branches: [ "master", "release-*" ] - paths: [ "model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json'] + paths: [ "model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_Python_Coverage.json'] issue_comment: types: [created] push: @@ -89,7 +89,7 @@ jobs: arguments: | -PuseWheelDistribution - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PreCommit_Python_Dataframes.yml b/.github/workflows/beam_PreCommit_Python_Dataframes.yml index 822c10862ac7..2dd6f394c9ca 100644 --- a/.github/workflows/beam_PreCommit_Python_Dataframes.yml +++ b/.github/workflows/beam_PreCommit_Python_Dataframes.yml @@ -17,7 +17,7 @@ name: PreCommit Python Dataframes on: pull_request_target: branches: [ "master", "release-*" ] - paths: [ "model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json'] + paths: [ 
"model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_Python_Dataframes.json'] issue_comment: types: [created] push: @@ -99,10 +99,10 @@ jobs: -PpythonVersion=${{ matrix.python_version }} \ -PuseWheelDistribution - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: - name: Python Test Results + name: Python ${{ matrix.python_version }} Test Results path: '**/pytest*.xml' - name: Publish Python Test Results uses: EnricoMi/publish-unit-test-result-action@v2 diff --git a/.github/workflows/beam_PreCommit_Python_Examples.yml b/.github/workflows/beam_PreCommit_Python_Examples.yml index 36a0bde3ebdc..cb0479c218f7 100644 --- a/.github/workflows/beam_PreCommit_Python_Examples.yml +++ b/.github/workflows/beam_PreCommit_Python_Examples.yml @@ -17,7 +17,7 @@ name: PreCommit Python Examples on: pull_request_target: branches: [ "master", "release-*" ] - paths: [ "model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json'] + paths: [ "model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_Python_Examples.json'] issue_comment: types: [created] push: @@ -99,10 +99,10 @@ jobs: -PpythonVersion=${{ matrix.python_version }} \ -PuseWheelDistribution - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: - name: Python Test Results + name: Python ${{ matrix.python_version }} Test Results path: '**/pytest*.xml' - name: Publish Python Test Results uses: EnricoMi/publish-unit-test-result-action@v2 diff --git a/.github/workflows/beam_PreCommit_Python_Integration.yml b/.github/workflows/beam_PreCommit_Python_Integration.yml index d0af4932168e..461d7891a0a5 100644 --- a/.github/workflows/beam_PreCommit_Python_Integration.yml +++ b/.github/workflows/beam_PreCommit_Python_Integration.yml @@ -17,7 +17,7 @@ name: PreCommit Python Integration on: pull_request_target: branches: [ "master", "release-*" ] - paths: ["model/**", "sdks/python/**", "release/**", 'release/trigger_all_tests.json'] + paths: ["model/**", "sdks/python/**", "release/**", 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_Python_Integration.json'] issue_comment: types: [created] push: @@ -105,10 +105,10 @@ jobs: -PuseWheelDistribution \ -PpythonVersion=${{ matrix.python_version }} \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: - name: Python Test Results + name: Python ${{ matrix.python_version }} Test Results path: '**/pytest*.xml' - name: Publish Python Test Results uses: EnricoMi/publish-unit-test-result-action@v2 diff --git a/.github/workflows/beam_PreCommit_Python_PVR_Flink.yml b/.github/workflows/beam_PreCommit_Python_PVR_Flink.yml index 7e083bf84313..64609737cb25 100644 --- a/.github/workflows/beam_PreCommit_Python_PVR_Flink.yml +++ b/.github/workflows/beam_PreCommit_Python_PVR_Flink.yml @@ -30,6 +30,7 @@ on: - 'runners/java-fn-execution/**' - 'runners/reference/**' - 'release/trigger_all_tests.json' + - '.github/trigger_files/beam_PreCommit_Python_PVR_Flink.json' issue_comment: types: [created] push: @@ -113,7 +114,7 @@ jobs: arguments: | -PpythonVersion=3.11 \ - name: Archive Python Test Results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: failure() with: name: Python Test Results diff --git a/.github/workflows/beam_PreCommit_Python_Runners.yml 
b/.github/workflows/beam_PreCommit_Python_Runners.yml
index f43c4eb47b6e..9d80f9cf5537 100644
--- a/.github/workflows/beam_PreCommit_Python_Runners.yml
+++ b/.github/workflows/beam_PreCommit_Python_Runners.yml
@@ -17,7 +17,7 @@ name: PreCommit Python Runners
 on:
   pull_request_target:
     branches: [ "master", "release-*" ]
-    paths: [ "model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json']
+    paths: [ "model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_Python_Runners.json']
   issue_comment:
     types: [created]
   push:
@@ -99,10 +99,10 @@ jobs:
             -PpythonVersion=${{ matrix.python_version }} \
             -PuseWheelDistribution
       - name: Archive Python Test Results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         if: failure()
         with:
-          name: Python Test Results
+          name: Python ${{ matrix.python_version }} Test Results
           path: '**/pytest*.xml'
       - name: Publish Python Test Results
         uses: EnricoMi/publish-unit-test-result-action@v2
diff --git a/.github/workflows/beam_PreCommit_Python_Transforms.yml b/.github/workflows/beam_PreCommit_Python_Transforms.yml
index 6d06596ed4c8..f4895ec27c53 100644
--- a/.github/workflows/beam_PreCommit_Python_Transforms.yml
+++ b/.github/workflows/beam_PreCommit_Python_Transforms.yml
@@ -17,7 +17,7 @@ name: PreCommit Python Transforms
 on:
   pull_request_target:
     branches: [ "master", "release-*" ]
-    paths: [ "model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json']
+    paths: [ "model/**","sdks/python/**","release/**", 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_Python_Transforms.json']
   issue_comment:
     types: [created]
   push:
@@ -99,10 +99,10 @@ jobs:
             -PpythonVersion=${{ matrix.python_version }} \
             -PuseWheelDistribution
       - name: Archive Python Test Results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         if: failure()
         with:
-          name: Python Test Results
+          name: Python ${{ matrix.python_version }} Test Results
           path: '**/pytest*.xml'
       - name: Publish Python Test Results
         uses: EnricoMi/publish-unit-test-result-action@v2
diff --git a/.github/workflows/beam_PreCommit_SQL.yml b/.github/workflows/beam_PreCommit_SQL.yml
index ac72f6b752da..b67addfee1c0 100644
--- a/.github/workflows/beam_PreCommit_SQL.yml
+++ b/.github/workflows/beam_PreCommit_SQL.yml
@@ -22,7 +22,7 @@ on:
     paths: ['sdks/java/extensions/sql/**','.github/workflows/beam_PreCommit_SQL.yml']
   pull_request_target:
     branches: ['master', 'release-*']
-    paths: ['sdks/java/extensions/sql/**', 'release/trigger_all_tests.json']
+    paths: ['sdks/java/extensions/sql/**', 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_SQL.json']
   issue_comment:
     types: [created]
   schedule:
@@ -91,8 +91,8 @@ jobs:
             -PdisableCheckStyle=true \
             -PenableJacocoReport \
       - name: Archive JUnit Test Results
-        uses: actions/upload-artifact@v3
-        if: failure()
+        uses: actions/upload-artifact@v4
+        if: ${{ !success() }}
         with:
           name: JUnit Test Results
           path: "**/build/reports/tests/"
@@ -104,7 +104,7 @@ jobs:
           comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }}
           files: '**/build/test-results/**/*.xml'
       - name: Archive SpotBugs Results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         if: always()
         with:
           name: SpotBugs Results
diff --git a/.github/workflows/beam_PreCommit_SQL_Java11.yml b/.github/workflows/beam_PreCommit_SQL_Java11.yml
index 9dadb4dcc2ff..da28e6201f1a 100644
--- a/.github/workflows/beam_PreCommit_SQL_Java11.yml
+++ b/.github/workflows/beam_PreCommit_SQL_Java11.yml
@@ -22,7 +22,7 @@ on:
     paths: ['sdks/java/extensions/sql/**','.github/workflows/beam_PreCommit_SQL_Java11.yml']
   pull_request_target:
     branches: ['master', 'release-*']
-    paths: ['sdks/java/extensions/sql/**', 'release/trigger_all_tests.json']
+    paths: ['sdks/java/extensions/sql/**', 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_SQL_Java11.json']
   issue_comment:
     types: [created]
   schedule:
@@ -92,7 +92,7 @@ jobs:
       - name: Build and Test
         uses: ./.github/actions/gradle-command-self-hosted-action
         with:
-          gradle-command: :sqlPreCommit
+          gradle-command: :sdks:java:extensions:sql:preCommit
           arguments: |
             -PdisableSpotlessCheck=true \
             -PdisableCheckStyle=true \
@@ -100,8 +100,8 @@ jobs:
             -PskipCheckerFramework \
             -Pjava11Home=$JAVA_HOME_11_X64 \
       - name: Archive JUnit Test Results
-        uses: actions/upload-artifact@v3
-        if: failure()
+        uses: actions/upload-artifact@v4
+        if: ${{ !success() }}
         with:
           name: JUnit Test Results
           path: "**/build/reports/tests/"
@@ -113,7 +113,7 @@ jobs:
           comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }}
           files: '**/build/test-results/**/*.xml'
       - name: Archive SpotBugs Results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         if: always()
         with:
           name: SpotBugs Results
diff --git a/.github/workflows/beam_PreCommit_SQL_Java17.yml b/.github/workflows/beam_PreCommit_SQL_Java17.yml
index 8a1e88f7c654..ef0b758ddd5b 100644
--- a/.github/workflows/beam_PreCommit_SQL_Java17.yml
+++ b/.github/workflows/beam_PreCommit_SQL_Java17.yml
@@ -22,7 +22,7 @@ on:
     paths: ['sdks/java/extensions/sql/**','.github/workflows/beam_PreCommit_SQL_Java17.yml']
   pull_request_target:
     branches: ['master', 'release-*']
-    paths: ['sdks/java/extensions/sql/**', 'release/trigger_all_tests.json']
+    paths: ['sdks/java/extensions/sql/**', 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_SQL_Java17.json']
   issue_comment:
     types: [created]
   schedule:
@@ -90,7 +90,7 @@ jobs:
       - name: Build and Test
         uses: ./.github/actions/gradle-command-self-hosted-action
         with:
-          gradle-command: :sqlPreCommit
+          gradle-command: :sdks:java:extensions:sql:preCommit
           arguments: |
             -PdisableSpotlessCheck=true \
             -PdisableCheckStyle=true \
@@ -98,8 +98,8 @@ jobs:
             -PskipCheckerFramework \
             -Pjava17Home=$JAVA_HOME_17_X64 \
       - name: Archive JUnit Test Results
-        uses: actions/upload-artifact@v3
-        if: failure()
+        uses: actions/upload-artifact@v4
+        if: ${{ !success() }}
         with:
           name: JUnit Test Results
           path: "**/build/reports/tests/"
@@ -111,7 +111,7 @@ jobs:
           comment_mode: ${{ github.event_name == 'issue_comment' && 'always' || 'off' }}
           files: '**/build/test-results/**/*.xml'
       - name: Archive SpotBugs Results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         if: always()
         with:
           name: SpotBugs Results
diff --git a/.github/workflows/beam_PreCommit_Spotless.yml b/.github/workflows/beam_PreCommit_Spotless.yml
index 6aa590e7556e..4ab1ade15847 100644
--- a/.github/workflows/beam_PreCommit_Spotless.yml
+++ b/.github/workflows/beam_PreCommit_Spotless.yml
@@ -36,6 +36,7 @@ on:
       - 'examples/kotlin/**'
      - '.test-infra/jenkins/'
       - 'release/trigger_all_tests.json'
+      - '.github/trigger_files/beam_PreCommit_Spotless.json'
   issue_comment:
     types: [created]
   schedule:
@@ -99,7 +100,7 @@ jobs:
         with:
           gradle-command: spotlessCheck checkStyleMain checkStyleTest :buildSrc:spotlessCheck
       - name: Upload test report
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: java-code-coverage-report
           path: "**/build/reports/checkstyle/*.xml"
diff --git a/.github/workflows/beam_PreCommit_Typescript.yml b/.github/workflows/beam_PreCommit_Typescript.yml
index f3184dc0f90d..678b8d53efe1 100644
--- a/.github/workflows/beam_PreCommit_Typescript.yml
+++ b/.github/workflows/beam_PreCommit_Typescript.yml
@@ -24,7 +24,7 @@ on:
     paths: ['sdks/python/apache_beam/runners/interactive/extensions/**', '.github/workflows/beam_PreCommit_Typescript.yml']
   pull_request_target:
     branches: ['master', 'release-*']
-    paths: ['sdks/python/apache_beam/runners/interactive/extensions/**', 'release/trigger_all_tests.json']
+    paths: ['sdks/python/apache_beam/runners/interactive/extensions/**', 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_Typescript.json']
   issue_comment:
     types: [created]
   schedule:
diff --git a/.github/workflows/beam_PreCommit_Website.yml b/.github/workflows/beam_PreCommit_Website.yml
index 848b4d44deec..0626596768f8 100644
--- a/.github/workflows/beam_PreCommit_Website.yml
+++ b/.github/workflows/beam_PreCommit_Website.yml
@@ -22,7 +22,7 @@ on:
     paths: ['website/**','.github/workflows/beam_PreCommit_Website.yml']
   pull_request_target:
     branches: ['master', 'release-*']
-    paths: ['website/**', 'release/trigger_all_tests.json']
+    paths: ['website/**', 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_Website.json']
   issue_comment:
     types: [created]
   schedule:
diff --git a/.github/workflows/beam_PreCommit_Website_Stage_GCS.yml b/.github/workflows/beam_PreCommit_Website_Stage_GCS.yml
index c7b51f137290..4696d3c6b9c8 100644
--- a/.github/workflows/beam_PreCommit_Website_Stage_GCS.yml
+++ b/.github/workflows/beam_PreCommit_Website_Stage_GCS.yml
@@ -22,7 +22,7 @@ on:
     paths: ['website/**','.github/workflows/beam_PreCommit_Website_Stage_GCS.yml']
   pull_request_target:
     branches: ['master', 'release-*']
-    paths: ['website/**', 'release/trigger_all_tests.json']
+    paths: ['website/**', 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_Website_Stage_GCS.json']
   issue_comment:
     types: [created]
   schedule:
diff --git a/.github/workflows/beam_PreCommit_Whitespace.yml b/.github/workflows/beam_PreCommit_Whitespace.yml
index da58d309f354..e5a13e3cb3cf 100644
--- a/.github/workflows/beam_PreCommit_Whitespace.yml
+++ b/.github/workflows/beam_PreCommit_Whitespace.yml
@@ -22,7 +22,7 @@ on:
     paths: ['**.md', '**.build.gradle', 'build.gradle.kts', '.github/workflows/beam_PreCommit_Whitespace.yml']
   pull_request_target:
     branches: ['master', 'release-*']
-    paths: ['**.md', '**.build.gradle', 'build.gradle.kts', 'release/trigger_all_tests.json']
+    paths: ['**.md', '**.build.gradle', 'build.gradle.kts', 'release/trigger_all_tests.json', '.github/trigger_files/beam_PreCommit_Whitespace.json']
   issue_comment:
     types: [created]
   schedule:
diff --git a/.github/workflows/beam_PostCommit_BeamMetrics_Publish.yml b/.github/workflows/beam_Publish_BeamMetrics.yml
similarity index 97%
rename from .github/workflows/beam_PostCommit_BeamMetrics_Publish.yml
rename to .github/workflows/beam_Publish_BeamMetrics.yml
index b655eab8088c..53f62b3bf9d4 100644
--- a/.github/workflows/beam_PostCommit_BeamMetrics_Publish.yml
+++ b/.github/workflows/beam_Publish_BeamMetrics.yml
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-name: PostCommit BeamMetrics Publish
+name: Publish BeamMetrics
 
 on:
   schedule:
@@ -48,7 +48,7 @@ env:
   GRADLE_ENTERPRISE_CACHE_PASSWORD: ${{ secrets.GE_CACHE_PASSWORD }}
 
 jobs:
-  beam_PostCommit_BeamMetrics_Publish:
+  beam_Publish_BeamMetrics:
     if: |
       github.event_name == 'push' ||
       github.event_name == 'workflow_dispatch' ||
diff --git a/.github/workflows/beam_PostCommit_Website_Publish.yml b/.github/workflows/beam_Publish_Website.yml
similarity index 95%
rename from .github/workflows/beam_PostCommit_Website_Publish.yml
rename to .github/workflows/beam_Publish_Website.yml
index ed1729021e98..726071641b7b 100644
--- a/.github/workflows/beam_PostCommit_Website_Publish.yml
+++ b/.github/workflows/beam_Publish_Website.yml
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-name: PostCommit Website Publish
+name: Publish Website
 
 on:
   schedule:
@@ -47,13 +47,13 @@ env:
   GRADLE_ENTERPRISE_CACHE_PASSWORD: ${{ secrets.GE_CACHE_PASSWORD }}
 
 jobs:
-  beam_PostCommit_Website_Publish:
+  beam_Publish_Website:
     if: |
       github.event_name == 'workflow_dispatch' ||
       (github.event_name == 'schedule' && github.repository == 'apache/beam')
     runs-on: [self-hosted, ubuntu-20.04, main]
     timeout-minutes: 30
-    name: beam_PostCommit_Website_Publish
+    name: beam_Publish_Website
     steps:
       - uses: actions/checkout@v4
       - name: Setup environment
diff --git a/.github/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml b/.github/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml
index 6681669f5958..8871af45b50e 100644
--- a/.github/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml
+++ b/.github/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml
@@ -109,9 +109,9 @@ jobs:
         env:
           MULTIARCH_TAG: ${{ steps.set_tag.outputs.TAG }}
           USER: github-actions
-      - name: Archive code coverage results
-        uses: actions/upload-artifact@v3
-        if: always()
+      - name: Archive Python Test Results
+        uses: actions/upload-artifact@v4
+        if: failure()
         with:
-          name: python-code-coverage-report
+          name: Python ${{ matrix.python_version }} Test Results
           path: "**/pytest*.xml"
diff --git a/.github/workflows/build_release_candidate.yml b/.github/workflows/build_release_candidate.yml
index 6f1499a63769..8d27cea1d5d5 100644
--- a/.github/workflows/build_release_candidate.yml
+++ b/.github/workflows/build_release_candidate.yml
@@ -59,7 +59,7 @@ jobs:
           ref: "v${{ github.event.inputs.RELEASE }}-RC${{ github.event.inputs.RC }}"
           repository: apache/beam
       - name: Install Java 8
-        uses: actions/setup-java@v3
+        uses: actions/setup-java@v4
         with:
           distribution: 'temurin'
           java-version: '8'
@@ -95,9 +95,13 @@ jobs:
     if: ${{github.event.inputs.STAGE_JAVA_SOURCE == 'yes'}}
     runs-on: ubuntu-latest
     steps:
-      - name: Validate and mask apache id/password
+      - name: Mask Apache Password
+        run: |
+          # Workaround for Actions bug - https://github.com/actions/runner/issues/643
+          APACHE_PASS=$(jq -r '.inputs.APACHE_PASSWORD' $GITHUB_EVENT_PATH)
+          echo "::add-mask::$APACHE_PASS"
+      - name: Validate apache id/password
         run: |
-          echo "::add-mask::${{ github.event.inputs.APACHE_PASSWORD }}"
           if [ "${{ github.event.inputs.APACHE_ID }}" == "" ]
           then
             echo "Must provide an apache id to stage artifacts to https://dist.apache.org/repos/dist/dev/beam/"
@@ -107,7 +111,7 @@ jobs:
             echo "Must provide an apache password to stage artifacts to https://dist.apache.org/repos/dist/dev/beam/"
           fi
       - name: Install Java 8
-        uses: actions/setup-java@v3
+        uses: actions/setup-java@v4
         with:
           distribution: 'temurin'
           java-version: '8'
@@ -149,7 +153,7 @@ jobs:
           svn add --force .
           svn status
-          svn commit -m "Staging Java artifacts for Apache Beam ${{ github.event.inputs.RELEASE }} RC${{ github.event.inputs.RC }}" --non-interactive --username ${{ github.event.inputs.APACHE_ID }} --password ${{ github.event.inputs.APACHE_PASSWORD }}
+          svn commit -m "Staging Java artifacts for Apache Beam ${{ github.event.inputs.RELEASE }} RC${{ github.event.inputs.RC }}" --non-interactive --username "${{ github.event.inputs.APACHE_ID }}" --password "${{ github.event.inputs.APACHE_PASSWORD }}"
 
 
   stage_python_artifacts:
     if: ${{github.event.inputs.STAGE_PYTHON_ARTIFACTS == 'yes'}}
@@ -157,9 +161,13 @@ jobs:
     steps:
       - name: Checkout
        uses: actions/checkout@v4
-      - name: Validate and mask apache id/password
+      - name: Mask Apache Password
+        run: |
+          # Workaround for Actions bug - https://github.com/actions/runner/issues/643
+          APACHE_PASS=$(jq -r '.inputs.APACHE_PASSWORD' $GITHUB_EVENT_PATH)
+          echo "::add-mask::$APACHE_PASS"
+      - name: Validate apache id/password
         run: |
-          echo "::add-mask::${{ github.event.inputs.APACHE_PASSWORD }}"
          if [ "${{ github.event.inputs.APACHE_ID }}" == "" ]
           then
             echo "Must provide an apache id to stage artifacts to https://dist.apache.org/repos/dist/dev/beam/"
@@ -227,7 +235,7 @@ jobs:
           cd ..
           svn add --force python
           svn status
-          svn commit -m "Staging Python artifacts for Apache Beam ${RELEASE} RC${RC_NUM}" --non-interactive --username ${{ github.event.inputs.APACHE_ID }} --password ${{ github.event.inputs.APACHE_PASSWORD }}
+          svn commit -m "Staging Python artifacts for Apache Beam ${RELEASE} RC${RC_NUM}" --non-interactive --username "${{ github.event.inputs.APACHE_ID }}" --password "${{ github.event.inputs.APACHE_PASSWORD }}"
 
 
   stage_docker:
@@ -243,19 +251,19 @@ jobs:
       - name: Free Disk Space (Ubuntu)
         uses: jlumbroso/free-disk-space@v1.3.0
       - name: Install Java 11
-        uses: actions/setup-java@v3
+        uses: actions/setup-java@v4
         with:
           distribution: 'temurin'
           java-version: '11'
       - name: Install Python 3.8
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: '3.8'
       - run: echo $JAVA_HOME
       - run: echo "JAVA11_HOME=${JAVA_HOME}" >> "$GITHUB_OUTPUT"
         id: export-java11
       - name: Install Java 8
-        uses: actions/setup-java@v3
+        uses: actions/setup-java@v4
         with:
           distribution: 'temurin'
           java-version: '8'
@@ -298,7 +306,7 @@ jobs:
           token: ${{ github.event.inputs.BEAM_SITE_TOKEN }}
           ref: release-docs
       - name: Install Python 3.8
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: '3.8'
       - name: Install node
@@ -306,7 +314,7 @@ jobs:
         with:
           node-version: '16'
       - name: Install Java 8
-        uses: actions/setup-java@v3
+        uses: actions/setup-java@v4
         with:
           distribution: 'temurin'
           java-version: '8'
@@ -338,7 +346,7 @@ jobs:
       - name: Add canonical link into javadocs
         uses: cicirello/javadoc-cleanup@v1
         with:
-          path-to-root: ${BEAM_ROOT_DIR}/sdks/java/javadoc/build/docs/javadoc
+          path-to-root: beam/sdks/java/javadoc/build/docs/javadoc
           base-url-path: https://beam.apache.org/releases/javadoc/current/
       - name: Consolidate Release Docs to beam-site branch with symlinks
         working-directory: beam-site
diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml
index 58d14c6fcd68..82924c3abcbd 100644
--- a/.github/workflows/build_wheels.yml
+++ b/.github/workflows/build_wheels.yml
@@ -89,7 +89,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
       - name: Install python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: 3.8
       - name: Get tag
@@ -125,12 +125,12 @@ jobs:
         working-directory: ./sdks/python
         run: mv $(ls | grep apache-beam) apache-beam-source
       - name: Upload source as artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: source
           path: sdks/python/apache-beam-source
       - name: Upload compressed sources as artifacts
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: source_zip
           path: sdks/python/dist
@@ -167,13 +167,13 @@ jobs:
         run: mv $(ls | grep apache-beam) apache-beam-source-rc
       - name: Upload RC source as artifact
         if: steps.is_rc.outputs.is_rc == 1
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: source_rc${{ steps.get_rc_version.outputs.RC_NUM }}
           path: sdks/python/apache-beam-source-rc
       - name: Upload compressed RC sources as artifacts
         if: steps.is_rc.outputs.is_rc == 1
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: source_zip_rc${{ steps.get_rc_version.outputs.RC_NUM }}
           path: sdks/python/dist
@@ -204,7 +204,7 @@ jobs:
     if: needs.check_env_variables.outputs.gcp-variables-set == 'true'
     steps:
       - name: Download compressed sources from artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: source_zip
           path: source/
@@ -237,18 +237,18 @@ jobs:
             arch: aarch64
     steps:
      - name: Download python source distribution from artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: source
           path: apache-beam-source
       - name: Download Python SDK RC source distribution from artifacts
         if: ${{ needs.build_source.outputs.is_rc == 1 }}
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: source_rc${{ needs.build_source.outputs.rc_num }}
           path: apache-beam-source-rc
       - name: Install Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: 3.8
       - uses: docker/setup-qemu-action@v1
@@ -278,9 +278,9 @@ jobs:
           done
         shell: bash
       - name: Upload wheels as artifacts
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: wheelhouse-${{ matrix.os_python.os }}
+          name: wheelhouse-${{ matrix.os_python.os }}${{ (matrix.arch == 'aarch64' && '-aarch64') || '' }}
           path: apache-beam-source/wheelhouse/
       - name: Build RC wheels
         if: ${{ needs.build_source.outputs.is_rc == 1 }}
@@ -303,9 +303,9 @@ jobs:
         shell: bash
       - name: Upload RC wheels as artifacts
         if: ${{ needs.build_source.outputs.is_rc == 1 }}
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: wheelhouse-rc${{ needs.build_source.outputs.rc_num }}-${{ matrix.os_python.os }}
+          name: wheelhouse-rc${{ needs.build_source.outputs.rc_num }}-${{ matrix.os_python.os }}${{ (matrix.arch == 'aarch64' && '-aarch64') || '' }}
           path: apache-beam-source-rc/wheelhouse/
 
   upload_wheels_to_gcs:
@@ -318,11 +318,15 @@ jobs:
     strategy:
       matrix:
         os : [ubuntu-latest, macos-latest, windows-latest]
+        arch: [auto]
+        include:
+          - os: "ubuntu-latest"
+            arch: aarch64
     steps:
       - name: Download wheels from artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
-          name: wheelhouse-${{ matrix.os }}
+          name: wheelhouse-${{ matrix.os }}${{ (matrix.arch == 'aarch64' && '-aarch64') || '' }}
           path: wheelhouse/
       - name: Authenticate on GCP
         uses: google-github-actions/setup-gcloud@v0
diff --git a/.github/workflows/code_completion_plugin_tests.yml b/.github/workflows/code_completion_plugin_tests.yml
index 38ffd2fbd3f4..ca03932c9944 100644
--- a/.github/workflows/code_completion_plugin_tests.yml
+++ b/.github/workflows/code_completion_plugin_tests.yml
@@ -73,7 +73,7 @@ jobs:
 
       # Setup Java environment for the next steps
       - name: Setup Java
-        uses: actions/setup-java@v3
+        uses: actions/setup-java@v4
         with:
           distribution: 'temurin'
           java-version: '11'
@@ -109,7 +109,7 @@ jobs:
       # Collect Tests Result of failed tests
       - name: Collect Tests Result
         if: ${{ failure() }}
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: tests-result
           path: ${{ github.workspace }}/build/reports/tests
diff --git a/.github/workflows/cut_release_branch.yml b/.github/workflows/cut_release_branch.yml
index 4201d6018c60..4c03e45913b4 100644
--- a/.github/workflows/cut_release_branch.yml
+++ b/.github/workflows/cut_release_branch.yml
@@ -21,28 +21,21 @@
 # To learn more about GitHub Actions in Apache Beam check the CI.md
 # Workflow used after https://github.com/apache/beam/commit/4183e747becebd18becee5fff547af365910fc9c
-# If help is needed debugging issues, you can view the cut_release_branch.sh and start_snapshot_build.sh scripts at that commit
+# If help is needed debugging issues, you can view the cut_release_branch.sh script at that commit
 # for guidance on how to do this manually.
-# (https://github.com/apache/beam/blob/4183e747becebd18becee5fff547af365910fc9c/release/src/main/scripts/cut_release_branch.sh and
-# https://github.com/apache/beam/blob/4183e747becebd18becee5fff547af365910fc9c/release/src/main/scripts/start_snapshot_build.sh).
+# (https://github.com/apache/beam/blob/4183e747becebd18becee5fff547af365910fc9c/release/src/main/scripts/cut_release_branch.sh).
 
 name: Cut Release Branch
 on:
   workflow_dispatch:
     inputs:
       RELEASE_VERSION:
-        description: Beam version of current release
+        description: Beam version of current release (branch being cut)
         required: true
         default: '2.XX.0'
       NEXT_VERSION:
         description: Next release version
         required: true
         default: '2.XX.0'
-      JENKINS_USERNAME:
-        description: Username of the current Jenkins user. Used to update mass_comment.py with all Jenkins jobs.
-        required: true
-      JENKINS_TOKEN:
-        description: API Token for the current Jenkins user. Can be generated at https://ci-beam.apache.org/user//configure
-        required: true
       CREATE_RELEASE_BRANCH:
         description: Whether to cut the release branch. You shouldn't skip this unless it has already been completed successfully (yes/no)
         required: true
@@ -102,11 +95,6 @@ jobs:
       SCRIPT_DIR: ./release/src/main/scripts
       RELEASE: ${{ github.event.inputs.RELEASE_VERSION }}
     steps:
-      - name: Mask Jenkins token
-        run: |
-          echo "::add-mask::$JENKINS_TOKEN"
-        env:
-          JENKINS_TOKEN: ${{ github.event.inputs.JENKINS_TOKEN }}
      - name: Validate Next Version
         run: |
           if [[ $NEXT_RELEASE =~ ([0-9]+\.[0-9]+\.[0-9]+) ]]; then
@@ -123,34 +111,6 @@ jobs:
           git config user.email actions@"$RUNNER_NAME".local
       - name: Install xmllint
         run: sudo apt-get install -y libxml2-utils
-      - name: Update mass_comment.py with new Jenkins jobs
-        run: |
-          rm release/src/main/scripts/jenkins_jobs.txt
-          for obj in $(curl https://ci-beam.apache.org/api/json | jq '.jobs[]' -c); do
-            echo $obj
-            url=$(echo $obj | jq '.url' -r || echo "")
-            name=$(echo $obj | jq '.name' -r || echo "")
-            if [ "$url" != "" ] && [ "$name" != "" ]; then
-              curl --user $JENKINS_USERNAME:$JENKINS_TOKEN $url/config.xml > /tmp/config.xml
-
-              if [ "$(xmllint --xpath 'string(//disabled)' /tmp/config.xml)" = "true" ]; then
-                echo "SKIPPING $url - disabled"
-              else
-                phrase=$(xmllint --xpath 'string(//triggers/org.jenkinsci.plugins.ghprb.GhprbTrigger/triggerPhrase)' /tmp/config.xml)
-                if [ "$phrase" != "" ]; then
-                  echo "Appending ${phrase},${name}"
-                  echo "${phrase},${name}" >> /tmp/result
-                fi
-              fi
-
-              rm /tmp/config.xml
-            fi
-          done
-
-          cat /tmp/result | sort | uniq | grep -i -E 'precommit|postcommit|validates|vr|example|test' | grep -v -i -E 'load|perf|website' >> release/src/main/scripts/jenkins_jobs.txt
-        env:
-          JENKINS_USERNAME: ${{ github.event.inputs.JENKINS_USERNAME }}
-          JENKINS_TOKEN: ${{ github.event.inputs.JENKINS_TOKEN }}
       - name: Update .asf.yaml to protect new release branch from force push
         run: |
           sed -i -e "s/master: {}/master: {}\n    release-${RELEASE}: {}/g" .asf.yaml
@@ -177,36 +137,6 @@ jobs:
       BRANCH_NAME: snapshot_build-${{ github.event.inputs.RELEASE_VERSION }}
       RELEASE_BRANCH: release-${{ github.event.inputs.RELEASE_VERSION }}
     steps:
-      - name: Install Hub
-        run: |
-          cd ~
-          wget https://github.com/github/hub/releases/download/v2.14.2/hub-linux-amd64-2.14.2.tgz
-          tar zvxvf hub-linux-amd64-2.14.2.tgz
-          sudo ./hub-linux-amd64-2.14.2/install
-          echo "eval "$(hub alias -s)"" >> ~/.bashrc
-      - uses: actions/checkout@v4
-      - name: Set git config
-        run: |
-          git config user.name $GITHUB_ACTOR
-          git config user.email actions@"$RUNNER_NAME".local
-      - name: Create Snapshot Branch
-        run: |
-          git remote add ${REMOTE_NAME} ${REMOTE_URL}
-          git checkout -b ${BRANCH_NAME}
-          touch empty_file.json
-          git add -A
-          git commit -m "Add empty file in order to create PR"
-          git push -f ${REMOTE_NAME}
-      - name: Create Pull Request
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Trigger snapshot build
         run: |
-          hub pull-request -b apache:${RELEASE_BRANCH} -F- <<<"[DO NOT MERGE]Start snapshot build for release process
-
-
-          Run Gradle Publish"
-
-          echo "NOTE: If there is no jenkins job started, please comment generated PR with: Run Gradle Publish"
-          echo "Things remained you need to do manually after build successful:"
-          echo "1. Close this generated PR in github website."
-          echo "2. Delete your remote branch ${BRANCH_NAME} form your beam repo in github website."
+          echo "Work in progress. Please trigger 'Release Nightly Snapshot' GHA workflow manually."
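Aside (not part of the patch): the release workflows in this patch repeatedly adopt the same secret-masking pattern, so a standalone sketch may help. Assumptions: a hypothetical workflow_dispatch input named SECRET_INPUT. Reading the value from the event payload on disk, rather than through ${{ }} template expansion, sidesteps the rendering path affected by https://github.com/actions/runner/issues/643:

      - name: Mask secret input
        shell: bash
        run: |
          # The full event JSON is written to $GITHUB_EVENT_PATH by the runner;
          # for workflow_dispatch events the inputs live under .inputs.
          # SECRET_INPUT is a hypothetical input name for illustration only.
          SECRET=$(jq -r '.inputs.SECRET_INPUT' "$GITHUB_EVENT_PATH")
          # ::add-mask:: redacts every later occurrence of the value in step logs.
          echo "::add-mask::$SECRET"

Because the secret never appears in a rendered script body, later steps can pass it to tools such as svn or twine (ideally quoted) with the log output already redacted.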
diff --git a/.github/workflows/dask_runner_tests.yml b/.github/workflows/dask_runner_tests.yml
index 35c320086992..628cf2c34462 100644
--- a/.github/workflows/dask_runner_tests.yml
+++ b/.github/workflows/dask_runner_tests.yml
@@ -41,7 +41,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
       - name: Install python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: 3.8
       - name: Build source
@@ -51,7 +51,7 @@ jobs:
         working-directory: ./sdks/python/dist
         run: mv $(ls | grep "apache-beam.*tar\.gz") apache-beam-source.tar.gz
       - name: Upload compressed sources as artifacts
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: python_sdk_source
           path: sdks/python/dist/apache-beam-source.tar.gz
@@ -72,7 +72,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
       - name: Install python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.params.py_ver }}
       - name: Install tox
@@ -89,7 +89,7 @@ jobs:
         working-directory: ./sdks/python
         run: tox -c tox.ini -e ${{ matrix.params.tox_env }}-win-dask
       - name: Upload test logs
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         if: always()
         with:
           name: pytest-${{matrix.os}}-${{matrix.params.py_ver}}
diff --git a/.github/workflows/deploy_release_candidate_pypi.yaml b/.github/workflows/deploy_release_candidate_pypi.yaml
index fd3994f658e8..8dafd520b1bf 100644
--- a/.github/workflows/deploy_release_candidate_pypi.yaml
+++ b/.github/workflows/deploy_release_candidate_pypi.yaml
@@ -31,8 +31,9 @@ jobs:
     steps:
     - name: Mask PyPi id/password
       run: |
-        echo "::add-mask::${{ github.event.inputs.PYPI_USER }}"
-        echo "::add-mask::${{ github.event.inputs.PYPI_PASSWORD }}"
+        # Workaround for Actions bug - https://github.com/actions/runner/issues/643
+        PYPI_PASSWORD=$(jq -r '.inputs.PYPI_PASSWORD' $GITHUB_EVENT_PATH)
+        echo "::add-mask::$PYPI_PASSWORD"
     - name: Checkout
       uses: actions/checkout@v4
     - name: Setup environment
@@ -86,4 +87,4 @@ jobs:
 
         echo "Will upload the following files to PyPI:"
         ls
-        twine upload * -u ${{ github.event.inputs.PYPI_USER }} -p ${{ github.event.inputs.PYPI_PASSWORD }}
+        twine upload * -u "${{ github.event.inputs.PYPI_USER }}" -p "${{ github.event.inputs.PYPI_PASSWORD }}"
diff --git a/.github/workflows/finalize_release.yml b/.github/workflows/finalize_release.yml
index 9a8b47ad3e42..f3eb544139c7 100644
--- a/.github/workflows/finalize_release.yml
+++ b/.github/workflows/finalize_release.yml
@@ -13,11 +13,8 @@ on:
     RC:
       description: Integer RC version for the release that we'd like to finalize (e.g. 3 for RC3)
       required: true
-    PYPI_USER:
-      description: PyPi username to perform the PyPi upload with
-      required: false
-    PYPI_PASSWORD:
-      description: PyPi password to perform the PyPi upload with
+    PYPI_API_TOKEN:
+      description: PyPi API token to perform the PyPi upload with
       required: false
     PUSH_DOCKER_ARTIFACTS:
       description: Whether to push SDK docker images to docker hub Apache organization. Should be yes unless you've already completed this step.
@@ -91,16 +88,15 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v4
-      - name: Mask and validate PyPi id/password
+      - name: Mask PyPi password
         run: |
-          echo "::add-mask::${{ github.event.inputs.PYPI_USER }}"
-          echo "::add-mask::${{ github.event.inputs.PYPI_PASSWORD }}"
-          if [ "${{ github.event.inputs.PYPI_USER }}" == "" ]
-          then
-            echo "Must provide a PyPi username to publish artifacts to PyPi"
-            exit 1
-          fi
-          if [ "${{ github.event.inputs.PYPI_PASSWORD }}" == "" ]
+          # Workaround for Actions bug - https://github.com/actions/runner/issues/643
+          PYPI_PASSWORD=$(jq -r '.inputs.PYPI_API_TOKEN' $GITHUB_EVENT_PATH)
+          echo "::add-mask::$PYPI_PASSWORD"
+      - name: Validate PyPi id/password
+        run: |
+          echo "::add-mask::${{ github.event.inputs.PYPI_API_TOKEN }}"
+          if [ "${{ github.event.inputs.PYPI_API_TOKEN }}" == "" ]
           then
             echo "Must provide a PyPi password to publish artifacts to PyPi"
             exit 1
@@ -122,7 +118,7 @@ jobs:
          cd "dist.apache.org/repos/dist/dev/beam/${RELEASE}/python/"
          echo "Will upload the following files to PyPI:"
          ls
-         twine upload * -u ${{ github.event.inputs.PYPI_USER }} -p ${{ github.event.inputs.PYPI_PASSWORD }}
+         twine upload * -u __token__ -p "${{ github.event.inputs.PYPI_API_TOKEN }}"
 
   push_git_tags:
     if: ${{github.event.inputs.TAG_RELEASE == 'yes'}}
diff --git a/.github/workflows/go_tests.yml b/.github/workflows/go_tests.yml
index 02947eff5ca0..ae5960666363 100644
--- a/.github/workflows/go_tests.yml
+++ b/.github/workflows/go_tests.yml
@@ -36,14 +36,14 @@ concurrency:
   cancel-in-progress: true
 jobs:
   build:
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, ubuntu-20.04, main]
     name: Go Build
     steps:
       - name: Check out code
         uses: actions/checkout@v4
         with:
           fetch-depth: 2
-      - uses: actions/setup-go@v4
+      - uses: actions/setup-go@v5
        with:
          go-version: '1.21'
          cache-dependency-path: |
diff --git a/.github/workflows/java_tests.yml b/.github/workflows/java_tests.yml
index 1132ba1c196b..bcb073f5a977 100644
--- a/.github/workflows/java_tests.yml
+++ b/.github/workflows/java_tests.yml
@@ -47,7 +47,7 @@ jobs:
   check_gcp_variables:
     timeout-minutes: 5
     name: "Check GCP variables set"
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, ubuntu-20.04, main]
     outputs:
       gcp-variables-set: ${{ steps.check_gcp_variables.outputs.gcp-variables-set }}
     steps:
@@ -70,7 +70,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu-latest, macos-latest, windows-latest]
+        os: [[self-hosted, ubuntu-20.04, main], macos-latest, windows-latest]
     steps:
       - name: Check out code
         uses: actions/checkout@v4
@@ -82,42 +82,40 @@ jobs:
         with:
           java-version: 8
           go-version: 1.21
-      - name: Remove default github maven configuration
-        # This step is a workaround to avoid a decryption issue of Beam's
-        # net.linguica.gradle.maven.settings plugin and github's provided maven
-        # settings.xml file
-        run: rm ~/.m2/settings.xml
       # :sdks:java:core:test
       - name: Run :sdks:java:core:test
-        uses: ./.github/actions/gradle-command-action
+        uses: ./.github/actions/gradle-command-self-hosted-action
         with:
-          arguments: -p sdks/java/core/ test
+          gradle-command: test
+          arguments: -p sdks/java/core/
       - name: Upload test logs for :sdks:java:core:test
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         if: always()
         with:
           name: java_unit_tests-sdks-java-core-test-${{ matrix.os }}
           path: sdks/java/core/build/reports/tests/test
       # :sdks:java:harness:test
       - name: Run :sdks:java:harness:test
-        uses: ./.github/actions/gradle-command-action
+        uses: ./.github/actions/gradle-command-self-hosted-action
         with:
-          arguments: -p sdks/java/harness/ test
+          gradle-command: test
+          arguments: -p sdks/java/harness/
         if: always()
      - name: Upload test logs for :sdks:java:harness:test
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         if: always()
         with:
           name: java_unit_tests-sdks-java-harness-test-${{ matrix.os }}
           path: sdks/java/harness/build/reports/tests/test
       # :runners:core-java:test
       - name: Run :runners:core-java:test
-        uses: ./.github/actions/gradle-command-action
+        uses: ./.github/actions/gradle-command-self-hosted-action
         with:
-          arguments: -p runners/core-java/ test
+          gradle-command: test
+          arguments: -p runners/core-java/
         if: always()
       - name: Upload test logs for :runners:core-java:test
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         if: always()
         with:
           name: java_unit_tests-runners-core-java-test-${{ matrix.os }}
@@ -129,7 +127,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu-latest, macos-latest, windows-latest]
+        os: [[self-hosted, ubuntu-20.04, main], macos-latest, windows-latest]
     steps:
       - name: Check out code
         uses: actions/checkout@v4
@@ -141,19 +139,15 @@ jobs:
         with:
           java-version: 8
           go-version: 1.21
-      - name: Remove default github maven configuration
-        # This step is a workaround to avoid a decryption issue of Beam's
-        # net.linguica.gradle.maven.settings plugin and github's provided maven
-        # settings.xml file
-        run: rm ~/.m2/settings.xml
       - name: Run WordCount Unix
-        uses: ./.github/actions/gradle-command-action
+        uses: ./.github/actions/gradle-command-self-hosted-action
         with:
-          arguments: -p examples/ integrationTest --tests org.apache.beam.examples.WordCountIT
+          gradle-command: integrationTest
+          arguments: -p examples/ --tests org.apache.beam.examples.WordCountIT
             -DintegrationTestRunner=direct
-            -DintegrationTestPipelineOptions=["--runner=DirectRunner","--tempRoot=./tmp"]
+            -DintegrationTestPipelineOptions=[\"--runner=DirectRunner\",\"--tempRoot=./tmp\"]
       - name: Upload test logs
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         if: always()
         with:
           name: java_wordcount_direct_runner-${{matrix.os}}
@@ -167,7 +161,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu-latest,windows-latest]
+        os: [[self-hosted, ubuntu-20.04, main], windows-latest]
     if: |
       needs.check_gcp_variables.outputs.gcp-variables-set == 'true' && (
       (github.event_name == 'push' || github.event_name == 'schedule') ||
@@ -191,19 +185,15 @@ jobs:
           service_account_key: ${{ secrets.GCP_SA_KEY }}
           project_id: ${{ secrets.GCP_PROJECT_ID }}
           export_default_credentials: true
-      - name: Remove default github maven configuration
-        # This step is a workaround to avoid a decryption issue of Beam's
-        # gradle-command-action plugin and github's provided maven
-        # settings.xml file
-        run: rm ~/.m2/settings.xml
       - name: Run WordCount
-        uses: ./.github/actions/gradle-command-action
+        uses: ./.github/actions/gradle-command-self-hosted-action
         with:
-          arguments: -p examples/ integrationTest --tests org.apache.beam.examples.WordCountIT
-            -DintegrationTestPipelineOptions=["--runner=DataflowRunner","--project=${{ secrets.GCP_PROJECT_ID }}","--tempRoot=gs://${{ secrets.GCP_TESTING_BUCKET }}/tmp/"]
+          gradle-command: integrationTest
+          arguments: -p examples/ --tests org.apache.beam.examples.WordCountIT
+            -DintegrationTestPipelineOptions=[\"--runner=DataflowRunner\",\"--project=${{ secrets.GCP_PROJECT_ID }}\",\"--tempRoot=gs://${{ secrets.GCP_TESTING_BUCKET }}/tmp/\"]
             -DintegrationTestRunner=dataflow
       - name: Upload test logs
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         if: always()
         with:
           name: java_wordcount_dataflow-${{matrix.os}}
diff --git a/.github/workflows/local_env_tests.yml b/.github/workflows/local_env_tests.yml
index 7dbff4feeb45..580ca7ed9f2b 100644
--- a/.github/workflows/local_env_tests.yml
+++ b/.github/workflows/local_env_tests.yml
@@ -46,10 +46,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-go@v4
+      - uses: actions/setup-go@v5
         with:
           go-version: '1.21'
-      - uses: actions/setup-python@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: '3.8'
       - name: "Installing local env dependencies"
@@ -64,10 +64,10 @@ jobs:
     runs-on: macos-latest
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-go@v4
+      - uses: actions/setup-go@v5
         with:
           go-version: '1.21'
-      - uses: actions/setup-python@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: '3.8'
       - name: "Installing local env dependencies"
diff --git a/.github/workflows/python_dependency_tests.yml b/.github/workflows/python_dependency_tests.yml
index 166899df90cb..1c4012029d56 100644
--- a/.github/workflows/python_dependency_tests.yml
+++ b/.github/workflows/python_dependency_tests.yml
@@ -35,7 +35,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
       - name: Install python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.params.py_ver }}
       - name: Install base_image_requirements.txt
diff --git a/.github/workflows/python_tests.yml b/.github/workflows/python_tests.yml
index 0309329e84e1..a179939e4df2 100644
--- a/.github/workflows/python_tests.yml
+++ b/.github/workflows/python_tests.yml
@@ -45,7 +45,7 @@ jobs:
   check_gcp_variables:
     timeout-minutes: 5
     name: "Check GCP variables"
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, ubuntu-20.04, main]
     outputs:
       gcp-variables-set: ${{ steps.check_gcp_variables.outputs.gcp-variables-set }}
     steps:
@@ -70,12 +70,12 @@ jobs:
       )
     needs:
       - check_gcp_variables
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, ubuntu-20.04, main]
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
       - name: Install python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: 3.8
       - name: Build source
@@ -85,7 +85,7 @@ jobs:
         working-directory: ./sdks/python/dist
         run: mv $(ls | grep "apache-beam.*tar\.gz") apache-beam-source.tar.gz
       - name: Upload compressed sources as artifacts
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: python_sdk_source
           path: sdks/python/dist/apache-beam-source.tar.gz
@@ -107,7 +107,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
       - name: Install python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.params.py_ver }}
       - name: Install tox
@@ -121,7 +121,7 @@ jobs:
         working-directory: ./sdks/python
         run: tox -c tox.ini run -e ${{ matrix.params.tox_env }}-win
       - name: Upload test logs
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         if: always()
         with:
           name: pytest-${{matrix.os}}-${{matrix.params.py_ver}}
@@ -133,13 +133,13 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu-latest, macos-latest, windows-latest]
+        os: [[self-hosted, ubuntu-20.04, main], macos-latest, windows-latest]
         python: ["3.8", "3.9", "3.10", "3.11"]
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
       - name: Install python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python }}
       - name: Install requirements
@@ -158,21 +158,21 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu-latest, macos-latest, windows-latest]
+        os: [[self-hosted, ubuntu-20.04, main], macos-latest, windows-latest]
         python: ["3.8", "3.9", "3.10", "3.11"]
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
       - name: Install python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python }}
       - name: Install go
-        uses: actions/setup-go@v4
+        uses: actions/setup-go@v5
         with:
           go-version: '1.21'
       - name: Download source from artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: python_sdk_source
           path: apache-beam-source
diff --git a/.github/workflows/run_perf_alert_tool.yml b/.github/workflows/run_perf_alert_tool.yml
index bc59bd945fe2..1f623571acde 100644
--- a/.github/workflows/run_perf_alert_tool.yml
+++ b/.github/workflows/run_perf_alert_tool.yml
@@ -37,7 +37,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
       - name: Install python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: 3.8
       - name: Authenticate on GCP
diff --git a/.github/workflows/run_rc_validation.yml b/.github/workflows/run_rc_validation.yml
index 35fb3ce05eae..15979a9e1acd 100644
--- a/.github/workflows/run_rc_validation.yml
+++ b/.github/workflows/run_rc_validation.yml
@@ -114,13 +114,13 @@ jobs:
           ref: ${{env.RC_TAG}}
 
       - name: Install Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{matrix.py_version}}
 
       - name: Setup Java JDK
-        uses: actions/setup-java@v3.8.0
+        uses: actions/setup-java@v4
         with:
           distribution: 'temurin'
           java-version: 11
@@ -187,13 +187,13 @@ jobs:
         uses: azure/setup-kubectl@v3
 
       - name: Setup Java JDK
-        uses: actions/setup-java@v3.8.0
+        uses: actions/setup-java@v4
         with:
           distribution: 'temurin'
           java-version: 11
 
       - name: Install Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{matrix.py_version}}
@@ -284,7 +284,7 @@ jobs:
         with:
           ref: ${{env.RC_TAG}}
       - name: Install Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: '3.8'
 
@@ -360,7 +360,7 @@ jobs:
           ref: ${{env.RC_TAG}}
 
       - name: Install Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{matrix.py_version}}
 
@@ -408,7 +408,7 @@ jobs:
           ref: ${{env.RC_TAG}}
 
       - name: Install Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{matrix.py_version}}
 
@@ -460,7 +460,7 @@ jobs:
         with:
           ref: ${{env.RC_TAG}}
       - name: Install Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{matrix.py_version}}
 
@@ -509,7 +509,7 @@ jobs:
         with:
           ref: ${{env.RC_TAG}}
       - name: Install Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{matrix.py_version}}
 
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index 959b2009aa4a..e16aa75b9730 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -28,7 +28,7 @@ jobs:
       issues: write
       pull-requests: write
     steps:
-      - uses: actions/stale@v8
+      - uses: actions/stale@v9
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          stale-pr-message: 'This pull request has been marked as stale due to 60 days of inactivity. It will be closed in 1 week if no further activity occurs. If you think that’s incorrect or this pull request requires a review, please simply write any comment. If closed, you can revive the PR at any time and @mention a reviewer or discuss it on the dev@beam.apache.org list. Thank you for your contributions.'
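Aside (not part of the patch): several job matrices above swap the hosted ubuntu-latest entry for a label array, so one os axis fans out across a self-hosted runner pool and hosted images. A minimal sketch of the pattern, with a hypothetical job name; runs-on accepts either a single image name or an array of labels that a runner must match:

jobs:
  demo:
    strategy:
      matrix:
        # An array entry selects a self-hosted runner by its labels;
        # plain string entries name GitHub-hosted images.
        os: [[self-hosted, ubuntu-20.04, main], macos-latest, windows-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - run: echo "one matrix axis, three runner targets"

The nested-array form keeps the rest of the job identical across runner types, which is why the patch can make the switch with a one-line change per workflow.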
diff --git a/.github/workflows/tour_of_beam_backend.yml b/.github/workflows/tour_of_beam_backend.yml index 3585bd0e670f..e3a016a4b5a7 100644 --- a/.github/workflows/tour_of_beam_backend.yml +++ b/.github/workflows/tour_of_beam_backend.yml @@ -42,7 +42,7 @@ jobs: working-directory: ./learning/tour-of-beam/backend steps: - uses: actions/checkout@v4 - - uses: actions/setup-go@v4 + - uses: actions/setup-go@v5 with: # pin to the biggest Go version supported by Cloud Functions runtime go-version: '1.16' diff --git a/.github/workflows/typescript_tests.yml b/.github/workflows/typescript_tests.yml index e71834f09ffb..0fdcfb070a22 100644 --- a/.github/workflows/typescript_tests.yml +++ b/.github/workflows/typescript_tests.yml @@ -83,7 +83,7 @@ jobs: with: node-version: '16' - name: Install Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.8 - name: Setup Beam Python @@ -138,7 +138,7 @@ jobs: with: node-version: '16' - name: Install python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.8 - name: Setup Beam Python diff --git a/.gitmodules b/.gitmodules index fa6e30a8850a..3a6406c405f3 100644 --- a/.gitmodules +++ b/.gitmodules @@ -7,6 +7,3 @@ [submodule ".github/actions/github-push-action"] path = .github/actions/github-push-action url = https://github.com/ad-m/github-push-action -[submodule ".github/actions/gradle-command-action"] - path = .github/actions/gradle-command-action - url = https://github.com/eskatos/gradle-command-action diff --git a/.test-infra/jenkins/job_PostCommit_CommunityMetrics.groovy b/.test-infra/jenkins/job_PostCommit_CommunityMetrics.groovy deleted file mode 100644 index b6b958a830a4..000000000000 --- a/.test-infra/jenkins/job_PostCommit_CommunityMetrics.groovy +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties - -job('beam_Prober_CommunityMetrics') { - description('Health check probes for the Community Metrics infrastructure') - commonJobProperties.setTopLevelMainJobProperties(delegate) - - commonJobProperties.enablePhraseTriggeringFromPullRequest(delegate, - 'Community Metrics Prober', - 'Run Community Metrics Prober') - - commonJobProperties.setAutoJob(delegate) - - commonJobProperties.enablePhraseTriggeringFromPullRequest( - delegate, - 'Community Metrics Prober', - 'Run Community Metrics Prober') - - // Gradle goals for this job. 
- steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':communityMetricsProber') - switches('--rerun-tasks') - commonJobProperties.setGradleSwitches(delegate) - } - } -} diff --git a/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_Flink.groovy b/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_Flink.groovy deleted file mode 100644 index 0d2a8612bdfe..000000000000 --- a/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_Flink.groovy +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import CommonTestProperties -import PostcommitJobBuilder - -import static PythonTestProperties.CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS - -// This job runs the suite of ValidatesRunner tests against the Flink runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_XVR_Flink', - 'Run XVR_Flink PostCommit', 'Flink CrossLanguageValidatesRunner Tests', this) { - description('Runs the CrossLanguageValidatesRunner suite on the Flink runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS.each { pythonVersion -> - shell("echo \"*** RUN CROSS-LANGUAGE FLINK USING PYTHON ${pythonVersion} ***\"") - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(":runners:flink:${CommonTestProperties.getFlinkVersion()}:job-server:validatesCrossLanguageRunner") - commonJobProperties.setGradleSwitches(delegate) - switches("-PpythonVersion=${pythonVersion}") - // only run non-python task (e.g. GoUsingJava) once - switches("-PskipNonPythonTask=${pythonVersion != CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS[0]}") - } - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_JavaUsingPython_Dataflow.groovy b/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_JavaUsingPython_Dataflow.groovy deleted file mode 100644 index 764bf697cbee..000000000000 --- a/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_JavaUsingPython_Dataflow.groovy +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -import static PythonTestProperties.CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS - -// This job runs the suite of ValidatesRunner tests against the Dataflow runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_XVR_JavaUsingPython_Dataflow', - 'Run XVR_JavaUsingPython_Dataflow PostCommit', 'Dataflow CrossLanguageValidatesRunner JavaUsingPython Tests', this) { - description('Runs the CrossLanguageValidatesRunner JavaUsingPython suite on the Dataflow runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS.each { pythonVersion -> - shell("echo \"*** RUN CROSS-LANGUAGE DATAFLOW JAVA WITH PYTHON EXTERNAL TRANSFORMS USING PYTHON ${pythonVersion} ***\"") - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:validatesCrossLanguageRunnerJavaUsingPython') - commonJobProperties.setGradleSwitches(delegate) - switches("-PpythonVersion=${pythonVersion}") - } - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_PythonUsingJavaSQL_Dataflow.groovy b/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_PythonUsingJavaSQL_Dataflow.groovy deleted file mode 100644 index 8c5d7f5be8fe..000000000000 --- a/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_PythonUsingJavaSQL_Dataflow.groovy +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -import static PythonTestProperties.CROSS_LANGUAGE_VALIDATES_RUNNER_DATAFLOW_USING_SQL_PYTHON_VERSIONS - -// This job runs the suite of ValidatesRunner tests against the Dataflow runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow', - 'Run XVR_PythonUsingJavaSQL_Dataflow PostCommit', 'Dataflow CrossLanguageValidatesRunner PythonUsingJavaSQL Tests', this) { - description('Runs the CrossLanguageValidatesRunner PythonUsingJavaSQL suite on the Dataflow runner.') - - // Set common parameters. 
- commonJobProperties.setTopLevelMainJobProperties(delegate) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/pytest*.xml') - } - - // Gradle goals for this job. - steps { - CROSS_LANGUAGE_VALIDATES_RUNNER_DATAFLOW_USING_SQL_PYTHON_VERSIONS.each { pythonVersion -> - shell("echo \"*** RUN CROSS-LANGUAGE DATAFLOW PYTHON WITH JAVA SQL TRANSFORMS USING PYTHON ${pythonVersion} ***\"") - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:validatesCrossLanguageRunnerPythonUsingSql') - commonJobProperties.setGradleSwitches(delegate) - switches("-PpythonVersion=${pythonVersion}") - } - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_Samza.groovy b/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_Samza.groovy deleted file mode 100644 index 1319fdec1fcc..000000000000 --- a/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_Samza.groovy +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import CommonTestProperties -import PostcommitJobBuilder - -import static PythonTestProperties.CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS - -// This job runs the suite of ValidatesRunner tests against the Samza runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_XVR_Samza', - 'Run XVR_Samza PostCommit', 'Samza CrossLanguageValidatesRunner Tests', this) { - description('Runs the CrossLanguageValidatesRunner suite on the Samza runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS.each { pythonVersion -> - shell("echo \"*** RUN CROSS-LANGUAGE SAMZA USING PYTHON ${pythonVersion} ***\"") - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(":runners:samza:job-server:validatesCrossLanguageRunner") - commonJobProperties.setGradleSwitches(delegate) - switches("-PpythonVersion=${pythonVersion}") - // only run non-python task (e.g. 
GoUsingJava) once - switches("-PskipNonPythonTask=${pythonVersion != CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS[0]}") - } - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_Spark3.groovy b/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_Spark3.groovy deleted file mode 100644 index eec4eac88278..000000000000 --- a/.test-infra/jenkins/job_PostCommit_CrossLanguageValidatesRunner_Spark3.groovy +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -import static PythonTestProperties.CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS - -// This job runs the suite of ValidatesRunner tests against the Flink runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_XVR_Spark3', - 'Run XVR_Spark3 PostCommit', 'Spark3 CrossLanguageValidatesRunner Tests', this) { - description('Runs the CrossLanguageValidatesRunner suite on the Spark3 runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS.each { pythonVersion -> - shell("echo \"*** RUN CROSS-LANGUAGE SPARK3 USING PYTHON ${pythonVersion} ***\"") - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:spark:3:job-server:validatesCrossLanguageRunner') - commonJobProperties.setGradleSwitches(delegate) - switches("-PpythonVersion=${pythonVersion}") - // only run non-python task (e.g. GoUsingJava) once - switches("-PskipNonPythonTask=${pythonVersion != CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS[0]}") - } - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Go.groovy b/.test-infra/jenkins/job_PostCommit_Go.groovy deleted file mode 100644 index 3d1c6a3b53ff..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Go.groovy +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This is the Go postcommit which runs a gradle build, and the current set -// of postcommit tests. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Go', 'Run Go PostCommit', - 'Go PostCommit (\"Run Go PostCommit\")', this) { - description('Runs Go PostCommit tests against master.') - previousNames(/beam_PostCommit_Go_GradleBuild/) - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties( - delegate, - 'master', - 300) // increased to 5 hours. - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':goPostCommit') - commonJobProperties.setGradleSwitches(delegate) - switches('--no-parallel') - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Go_ValidatesRunner_Flink.groovy b/.test-infra/jenkins/job_PostCommit_Go_ValidatesRunner_Flink.groovy deleted file mode 100644 index e0cd744dc434..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Go_ValidatesRunner_Flink.groovy +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the suite of Go integration tests against the Flink runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Go_VR_Flink', - 'Run Go Flink ValidatesRunner', 'Go Flink ValidatesRunner Tests', this) { - description('Runs Go integration tests on the Flink runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 100, true, 'beam', true, 40) - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:go:test:flinkValidatesRunner') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java.groovy b/.test-infra/jenkins/job_PostCommit_Java.groovy deleted file mode 100644 index cb3e0930dc89..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java.groovy +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the Java postcommit tests, including the suite of integration -// tests. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java', 'Run Java PostCommit', - 'Java SDK Post Commit Tests', this) { - - description('Runs PostCommit tests on the Java SDK.') - previousNames(/beam_PostCommit_Java_GradleBuild/) - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':javaPostCommit') - commonJobProperties.setGradleSwitches(delegate) - // Specify maven home on Jenkins, needed by Maven archetype integration tests. - switches('-Pmaven_home=/home/jenkins/tools/maven/apache-maven-3.5.4') - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Avro_Versions.groovy b/.test-infra/jenkins/job_PostCommit_Java_Avro_Versions.groovy deleted file mode 100644 index ea07cb263928..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Avro_Versions.groovy +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the Java tests that depends on Avro against different Avro API versions -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Avro_Versions', 'Run PostCommit_Java_Avro_Versions', - 'Java Avro Versions Post Commit Tests', this) { - - description('Java Avro Versions Post Commit Tests') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(":javaAvroVersionsTest") - commonJobProperties.setGradleSwitches(delegate) - // Specify maven home on Jenkins, needed by Maven archetype integration tests. 
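The -Pmaven_home switch that follows points the Maven archetype integration tests at the Maven installation provisioned on the Jenkins workers. A plausible sketch of how a build script consumes such a property (hypothetical; the real lookup lives in Beam's Gradle build logic, which is not shown here):

    // Hypothetical consumer of the -Pmaven_home property; falls back to the
    // environment if the property is absent.
    def mavenHome = project.findProperty('maven_home') ?: System.getenv('M2_HOME')
    // The archetype integration tests would then invoke "${mavenHome}/bin/mvn".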
- switches('-Pmaven_home=/home/jenkins/tools/maven/apache-maven-3.5.4') - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Dataflow.groovy b/.test-infra/jenkins/job_PostCommit_Java_Dataflow.groovy deleted file mode 100644 index 2ab27da0f1b0..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Dataflow.groovy +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the Java postcommit tests, including the suite of integration -// tests. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_DataflowV1', 'Run PostCommit_Java_Dataflow', - 'Dataflow Java Post Commit Tests', this) { - - description('Dataflow Java Post Commit Tests') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(":runners:google-cloud-dataflow-java:postCommit") - commonJobProperties.setGradleSwitches(delegate) - // Specify maven home on Jenkins, needed by Maven archetype integration tests. - switches('-Pmaven_home=/home/jenkins/tools/maven/apache-maven-3.5.4') - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Dataflow_Examples_Java11.groovy b/.test-infra/jenkins/job_PostCommit_Java_Dataflow_Examples_Java11.groovy deleted file mode 100644 index 7e1f383ca3c5..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Dataflow_Examples_Java11.groovy +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import PostcommitJobBuilder -import CommonJobProperties as commonJobProperties - -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Examples_Dataflow_Java11', - 'Run Java examples on Dataflow Java 11', 'Google Cloud Dataflow Runner Examples Java 11', this) { - - description('Runs the Java Examples suite on the Java 11 enabled Dataflow runner.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 180) - - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:examples:java11PostCommit') - - // Increase parallel worker threads above processor limit since most time is - // spent waiting on Dataflow jobs. ValidatesRunner tests on Dataflow are slow - // because each one launches a Dataflow job with about 3 mins of overhead. - // 3 x num_cores strikes a good balance between maxing out parallelism without - // overloading the machines. - commonJobProperties.setGradleSwitches(delegate, 3 * Runtime.runtime.availableProcessors()) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Dataflow_Examples_Java17.groovy b/.test-infra/jenkins/job_PostCommit_Java_Dataflow_Examples_Java17.groovy deleted file mode 100644 index 14df1a6724e9..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Dataflow_Examples_Java17.groovy +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import PostcommitJobBuilder -import CommonJobProperties as commonJobProperties - -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Examples_Dataflow_Java17', - 'Run Java examples on Dataflow Java 17', 'Google Cloud Dataflow Runner Examples Java 17', this) { - - description('Runs the Java Examples suite on the Java 17 enabled Dataflow runner.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 180) - - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:examples:java17PostCommit') - - // Increase parallel worker threads above processor limit since most time is - // spent waiting on Dataflow jobs. ValidatesRunner tests on Dataflow are slow - // because each one launches a Dataflow job with about 3 mins of overhead. - // 3 x num_cores strikes a good balance between maxing out parallelism without - // overloading the machines. 
- commonJobProperties.setGradleSwitches(delegate, 3 * Runtime.runtime.availableProcessors()) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Dataflow_V2.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Dataflow_V2.groovy deleted file mode 100644 index 5bfda20db3cf..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Examples_Dataflow_V2.groovy +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import PostcommitJobBuilder -import CommonJobProperties as commonJobProperties - -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Examples_Dataflow_V2', - 'Run Java Examples on Dataflow Runner V2', 'Google Cloud Dataflow Runner V2 Examples', this) { - - description('Runs the Java Examples suite on Dataflow runner V2.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 180) - - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:examplesJavaRunnerV2IntegrationTest') - - // Increase parallel worker threads above processor limit since most time is - // spent waiting on Dataflow jobs. ValidatesRunner tests on Dataflow are slow - // because each one launches a Dataflow job with about 3 mins of overhead. - // 3 x num_cores strikes a good balance between maxing out parallelism without - // overloading the machines. - commonJobProperties.setGradleSwitches(delegate, 3 * Runtime.runtime.availableProcessors()) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Dataflow_V2_Java11.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Dataflow_V2_Java11.groovy deleted file mode 100644 index 6229f7c48a72..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Examples_Dataflow_V2_Java11.groovy +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
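The heuristic described in the comment above is a simple arithmetic scaling of the Gradle worker count, performed by the call on the next line. A minimal illustration of the numbers involved (the 16-core figure is an assumption for illustration, not stated anywhere in these jobs):

    // 3 x num_cores; the threads mostly sit blocked on remote Dataflow jobs.
    int workers = 3 * Runtime.runtime.availableProcessors()
    // e.g. availableProcessors() == 16  ->  workers == 48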
- */ - -import PostcommitJobBuilder -import CommonJobProperties as commonJobProperties - -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Examples_Dataflow_V2_java11', - 'Run Java 11 Examples on Dataflow Runner V2', 'Google Cloud Dataflow Runner V2 Examples Java 11', this) { - - description('Runs the Java Examples suite on Dataflow runner V2 using the Java 11 SDK harness.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 180) - - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:examplesJavaRunnerV2IntegrationTest') - - // Increase parallel worker threads above processor limit since most time is - // spent waiting on Dataflow jobs. ValidatesRunner tests on Dataflow are slow - // because each one launches a Dataflow job with about 3 mins of overhead. - // 3 x num_cores strikes a good balance between maxing out parallelism without - // overloading the machines. - commonJobProperties.setGradleSwitches(delegate, 3 * Runtime.runtime.availableProcessors()) - switches '-PdisableSpotlessCheck=true' - switches '-PdisableCheckStyle=true' - switches '-PtestJavaVersion=11' - switches '-PskipCheckerFramework' - switches "-Pjava11Home=${commonJobProperties.JAVA_11_HOME}" - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Dataflow_V2_Java17.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Dataflow_V2_Java17.groovy deleted file mode 100644 index 7e52a7e09789..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Examples_Dataflow_V2_Java17.groovy +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import PostcommitJobBuilder -import CommonJobProperties as commonJobProperties - -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Examples_Dataflow_V2_java17', - 'Run Java 17 Examples on Dataflow Runner V2', 'Google Cloud Dataflow Runner V2 Examples Java 17', this) { - - description('Runs the Java Examples suite on Dataflow runner V2 using the Java 17 SDK harness.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 180) - - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:examplesJavaRunnerV2IntegrationTest') - - // Increase parallel worker threads above processor limit since most time is - // spent waiting on Dataflow jobs. ValidatesRunner tests on Dataflow are slow - // because each one launches a Dataflow job with about 3 mins of overhead. - // 3 x num_cores strikes a good balance between maxing out parallelism without - // overloading the machines. 
- commonJobProperties.setGradleSwitches(delegate, 3 * Runtime.runtime.availableProcessors()) - switches '-PdisableSpotlessCheck=true' - switches '-PdisableCheckStyle=true' - switches '-PtestJavaVersion=17' - switches '-PskipCheckerFramework' - switches "-Pjava17Home=${commonJobProperties.JAVA_17_HOME}" - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy deleted file mode 100644 index dbea565102df..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Examples_Direct.groovy +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the Java examples tests with DirectRunner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Examples_Direct', - 'Run Java Examples_Direct', 'Java Direct Runner Examples', this) { - - description('Run Java Examples on Direct Runner') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120) - - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Execute shell command to run examples. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:direct:examplesIntegrationTest') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy deleted file mode 100644 index bf759bac04ec..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Examples_Flink.groovy +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the Java examples tests with FlinkRunner. 
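The job defined next resolves its Gradle task path from the pinned Flink version rather than hard-coding it; a small sketch of the interpolation (the sample version number is illustrative only):

    // e.g. CommonTestProperties.getFlinkVersion() returning '1.15' would yield
    // ':runners:flink:1.15:examplesIntegrationTest'
    def task = ":runners:flink:${CommonTestProperties.getFlinkVersion()}:examplesIntegrationTest"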
-PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Examples_Flink', - 'Run Java Examples_Flink', 'Java Flink Runner Examples', this) { - - description('Run Java Examples on Flink Runner') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120) - - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Execute shell command to run examples. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(":runners:flink:${CommonTestProperties.getFlinkVersion()}:examplesIntegrationTest") - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy b/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy deleted file mode 100644 index f4af7dea6c0a..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Examples_Spark.groovy +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the Java examples tests with SparkRunner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Examples_Spark', - 'Run Java Examples_Spark', 'Java Spark Runner Examples', this) { - - description('Run Java Examples on Spark Runner') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120) - - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Execute shell command to run examples. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:spark:3:examplesIntegrationTest') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Hadoop_Versions.groovy b/.test-infra/jenkins/job_PostCommit_Java_Hadoop_Versions.groovy deleted file mode 100644 index dfca212d64fb..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Hadoop_Versions.groovy +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the Java postcommit tests, including the suite of integration -// tests. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Hadoop_Versions', 'Run PostCommit_Java_Hadoop_Versions', - 'Java Hadoop Versions Post Commit Tests', this) { - - description('Java Hadoop Versions Post Commit Tests') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(":javaHadoopVersionsTest") - commonJobProperties.setGradleSwitches(delegate) - // Specify maven home on Jenkins, needed by Maven archetype integration tests. - switches('-Pmaven_home=/home/jenkins/tools/maven/apache-maven-3.5.4') - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Jpms_Dataflow_Java11.groovy b/.test-infra/jenkins/job_PostCommit_Java_Jpms_Dataflow_Java11.groovy deleted file mode 100644 index e65bc273431a..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Jpms_Dataflow_Java11.groovy +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the Java postcommit tests, including the suite of integration -// tests. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Jpms_Dataflow_Java11', 'Run Jpms Dataflow Java 11 PostCommit', - 'JPMS Java 11 Dataflow Post Commit Tests', this) { - - description('Runs JPMS tests on Dataflow using the Java 11 SDK.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:jpms-tests:dataflowRunnerIntegrationTest') - commonJobProperties.setGradleSwitches(delegate) - switches("-Dorg.gradle.java.home=${commonJobProperties.JAVA_11_HOME}") - // Specify maven home on Jenkins, needed by Maven archetype integration tests. 
- switches('-Pmaven_home=/home/jenkins/tools/maven/apache-maven-3.5.4') - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Jpms_Dataflow_Java17.groovy b/.test-infra/jenkins/job_PostCommit_Java_Jpms_Dataflow_Java17.groovy deleted file mode 100644 index f518985ca7a8..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Jpms_Dataflow_Java17.groovy +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the Java postcommit tests, including the suite of integration -// tests. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Jpms_Dataflow_Java17', 'Run Jpms Dataflow Java 17 PostCommit', - 'JPMS Java 17 Dataflow Post Commit Tests', this) { - - description('Runs JPMS tests on Dataflow using the Java 17 SDK.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:jpms-tests:dataflowRunnerIntegrationTest') - commonJobProperties.setGradleSwitches(delegate) - switches("-PskipCheckerFramework") - switches("-PtestJavaVersion=17") - switches("-Pjava17Home=${commonJobProperties.JAVA_17_HOME}") - // Specify maven home on Jenkins, needed by Maven archetype integration tests. - switches('-Pmaven_home=/home/jenkins/tools/maven/apache-maven-3.5.4') - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Jpms_Direct_Java11.groovy b/.test-infra/jenkins/job_PostCommit_Java_Jpms_Direct_Java11.groovy deleted file mode 100644 index 9faf78d5694a..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Jpms_Direct_Java11.groovy +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the Java postcommit tests, including the suite of integration -// tests. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Jpms_Direct_Java11', 'Run Jpms Direct Java 11 PostCommit', - 'JPMS Java 11 direct runner Post Commit Tests', this) { - - description('Runs JPMS tests on the direct runner using the Java 11 SDK.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:jpms-tests:directRunnerIntegrationTest') - commonJobProperties.setGradleSwitches(delegate) - switches("-Dorg.gradle.java.home=${commonJobProperties.JAVA_11_HOME}") - // Specify maven home on Jenkins, needed by Maven archetype integration tests. - switches('-Pmaven_home=/home/jenkins/tools/maven/apache-maven-3.5.4') - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Jpms_Direct_Java17.groovy b/.test-infra/jenkins/job_PostCommit_Java_Jpms_Direct_Java17.groovy deleted file mode 100644 index 04c31389ecbe..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Jpms_Direct_Java17.groovy +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the Java postcommit tests, including the suite of integration -// tests. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Jpms_Direct_Java17', 'Run Jpms Direct Java 17 PostCommit', - 'JPMS Java 17 direct runner Post Commit Tests', this) { - - description('Runs JPMS tests on the direct runner using the Java 17 SDK.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:jpms-tests:directRunnerIntegrationTest') - commonJobProperties.setGradleSwitches(delegate) - switches("-PskipCheckerFramework") - switches("-PtestJavaVersion=17") - switches("-Pjava17Home=${commonJobProperties.JAVA_17_HOME}") - // Specify maven home on Jenkins, needed by Maven archetype integration tests. 
- switches('-Pmaven_home=/home/jenkins/tools/maven/apache-maven-3.5.4') - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Jpms_Flink_Java11.groovy b/.test-infra/jenkins/job_PostCommit_Java_Jpms_Flink_Java11.groovy deleted file mode 100644 index fcd9a0d4ea2f..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Jpms_Flink_Java11.groovy +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the Java postcommit tests, including the suite of integration -// tests. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Jpms_Flink_Java11', 'Run Jpms Flink Java 11 PostCommit', - 'JPMS Java 11 Flink Post Commit Tests', this) { - - description('Runs JPMS tests on Flink using the Java 11 SDK.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:jpms-tests:flinkRunnerIntegrationTest') - commonJobProperties.setGradleSwitches(delegate) - switches("-Dorg.gradle.java.home=${commonJobProperties.JAVA_11_HOME}") - // Specify maven home on Jenkins, needed by Maven archetype integration tests. - switches('-Pmaven_home=/home/jenkins/tools/maven/apache-maven-3.5.4') - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Jpms_Spark_Java11.groovy b/.test-infra/jenkins/job_PostCommit_Java_Jpms_Spark_Java11.groovy deleted file mode 100644 index 35a9ef961f95..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Jpms_Spark_Java11.groovy +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the Java postcommit tests, including the suite of integration -// tests. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Jpms_Spark_Java11', 'Run Jpms Spark Java 11 PostCommit', - 'JPMS Java 11 Spark Post Commit Tests', this) { - - description('Runs JPMS tests on Spark using the Java 11 SDK.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:jpms-tests:sparkRunnerIntegrationTest') - commonJobProperties.setGradleSwitches(delegate) - switches("-Dorg.gradle.java.home=${commonJobProperties.JAVA_11_HOME}") - // Specify maven home on Jenkins, needed by Maven archetype integration tests. - switches('-Pmaven_home=/home/jenkins/tools/maven/apache-maven-3.5.4') - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Dataflow.groovy b/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Dataflow.groovy deleted file mode 100644 index 868bc69a8468..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Dataflow.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import CommonTestProperties.Runner -import CommonTestProperties.SDK -import CommonTestProperties.TriggeringContext -import NexmarkBuilder as Nexmark -import NoPhraseTriggeringPostCommitBuilder -import PhraseTriggeringPostCommitBuilder - -// This job runs the suite of ValidatesRunner tests against the Dataflow runner. 
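From here on, each deleted Nexmark suite is registered twice: once through NoPhraseTriggeringPostCommitBuilder, which runs automatically after commits to master, and once through PhraseTriggeringPostCommitBuilder, which runs on demand when the trigger phrase is commented on a pull request.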
-NoPhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_Dataflow', - 'Dataflow Runner Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Dataflow runner.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - commonJob(delegate, TriggeringContext.POST_COMMIT) - } - -PhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_Dataflow', - 'Run Dataflow Runner Nexmark Tests', 'Dataflow Runner Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Dataflow runner against a Pull Request, on demand.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - commonJob(delegate, TriggeringContext.PR) - } - -private void commonJob(delegate, TriggeringContext triggeringContext) { - def final JOB_SPECIFIC_OPTIONS = [ - 'region' : 'us-central1', - 'suite' : 'STRESS', - 'numWorkers' : 4, - 'maxNumWorkers' : 4, - 'autoscalingAlgorithm' : 'NONE', - 'nexmarkParallel' : 16, - 'enforceEncodability' : true, - 'enforceImmutability' : true - ] - Nexmark.standardJob(delegate, Runner.DATAFLOW, SDK.JAVA, JOB_SPECIFIC_OPTIONS, triggeringContext) -} diff --git a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Dataflow_V2.groovy b/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Dataflow_V2.groovy deleted file mode 100644 index a8ea21876f91..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Dataflow_V2.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import CommonTestProperties.Runner -import CommonTestProperties.SDK -import CommonTestProperties.TriggeringContext -import NexmarkBuilder as Nexmark -import NoPhraseTriggeringPostCommitBuilder -import PhraseTriggeringPostCommitBuilder - -// This job runs the suite of Nexmark tests against the Dataflow runner V2. 
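The JOB_SPECIFIC_OPTIONS maps used throughout these Nexmark jobs are plain key/value pipeline options. A plausible sketch of how a builder such as NexmarkBuilder might flatten one into command-line switches (the real implementation lives in .test-infra/jenkins/NexmarkBuilder.groovy and is not shown here, so treat the shape as an assumption):

    // Hypothetical flattening of an options map into --key=value switches.
    def toSwitches(Map options) {
      options.collect { k, v -> "--${k}=${v}" }.join(' ')
    }
    // toSwitches([region: 'us-central1', numWorkers: 4])
    //   == '--region=us-central1 --numWorkers=4'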
-NoPhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_Dataflow_V2', - 'Dataflow Runner V2 Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Dataflow runner V2.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - commonJob(delegate, TriggeringContext.POST_COMMIT) - } - -PhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_DataflowV2', - 'Run Dataflow Runner V2 Nexmark Tests', 'Dataflow Runner V2 Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Dataflow runner V2 against a Pull Request, on demand.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - commonJob(delegate, TriggeringContext.PR) - } - -private void commonJob(delegate, TriggeringContext triggeringContext) { - def final JOB_SPECIFIC_OPTIONS = [ - 'influxTags' : '{\\\"runnerVersion\\\":\\\"V2\\\",\\\"javaVersion\\\":\\\"8\\\"}', - 'exportSummaryToBigQuery' : false, - 'region' : 'us-central1', - 'suite' : 'STRESS', - 'numWorkers' : 4, - 'maxNumWorkers' : 4, - 'autoscalingAlgorithm' : 'NONE', - 'nexmarkParallel' : 16, - 'enforceEncodability' : true, - 'enforceImmutability' : true - ] - - def final JOB_SPECIFIC_SWITCHES = [ - '-Pnexmark.runner.version="V2"' - ] - - Nexmark.nonQueryLanguageJobs(delegate, Runner.DATAFLOW, SDK.JAVA, JOB_SPECIFIC_OPTIONS, triggeringContext, JOB_SPECIFIC_SWITCHES, Nexmark.DEFAULT_JAVA_RUNTIME_VERSION) -} diff --git a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Dataflow_V2_Java11.groovy b/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Dataflow_V2_Java11.groovy deleted file mode 100644 index 16866841cc72..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Dataflow_V2_Java11.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import CommonTestProperties.Runner -import CommonTestProperties.SDK -import CommonTestProperties.TriggeringContext -import NexmarkBuilder as Nexmark -import NoPhraseTriggeringPostCommitBuilder -import PhraseTriggeringPostCommitBuilder - -// This job runs the suite of Nexmark tests against the Dataflow runner V2. 
-NoPhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_Dataflow_V2_Java11', - 'Dataflow Runner V2 Java 11 Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Dataflow runner V2 on Java 11.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - commonJob(delegate, TriggeringContext.POST_COMMIT) - } - -PhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_DataflowV2_Java11', - 'Run Dataflow Runner V2 Java 11 Nexmark Tests', 'Dataflow Runner V2 Java 11 Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Dataflow runner V2 on Java 11 against a Pull Request, on demand.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - commonJob(delegate, TriggeringContext.PR) - } - -private void commonJob(delegate, TriggeringContext triggeringContext) { - def final JOB_SPECIFIC_OPTIONS = [ - 'influxTags' : '{\\\"runnerVersion\\\":\\\"V2\\\",\\\"javaVersion\\\":\\\"11\\\"}', - 'exportSummaryToBigQuery' : false, - 'region' : 'us-central1', - 'suite' : 'STRESS', - 'numWorkers' : 4, - 'maxNumWorkers' : 4, - 'autoscalingAlgorithm' : 'NONE', - 'nexmarkParallel' : 16, - 'enforceEncodability' : true, - 'enforceImmutability' : true - ] - - def final JOB_SPECIFIC_SWITCHES = [ - '-Pnexmark.runner.version="V2"' - ] - - Nexmark.nonQueryLanguageJobs(delegate, Runner.DATAFLOW, SDK.JAVA, JOB_SPECIFIC_OPTIONS, triggeringContext, JOB_SPECIFIC_SWITCHES, Nexmark.JAVA_11_RUNTIME_VERSION) -} diff --git a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Dataflow_V2_Java17.groovy b/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Dataflow_V2_Java17.groovy deleted file mode 100644 index 40bfb477b568..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Dataflow_V2_Java17.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import CommonTestProperties.Runner -import CommonTestProperties.SDK -import CommonTestProperties.TriggeringContext -import NexmarkBuilder as Nexmark -import NoPhraseTriggeringPostCommitBuilder -import PhraseTriggeringPostCommitBuilder - -// This job runs the suite of Nexmark tests against the Dataflow runner V2. 
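The heavily escaped influxTags values above survive two rounds of unescaping before they reach InfluxDB: the Groovy parser consumes one level of backslashes, and the shell that launches the Gradle command presumably consumes the rest (an assumption about the quoting path, which is not visible in these files):

    def influxTags = '{\\\"runnerVersion\\\":\\\"V2\\\",\\\"javaVersion\\\":\\\"11\\\"}'
    // After Groovy parsing, the string holds:
    //   {\"runnerVersion\":\"V2\",\"javaVersion\":\"11\"}
    // After the shell strips the remaining backslashes, the pipeline sees
    // ordinary JSON:
    //   {"runnerVersion":"V2","javaVersion":"11"}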
-NoPhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_Dataflow_V2_Java17', - 'Dataflow Runner V2 Java 17 Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Dataflow runner V2 on Java 17.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - commonJob(delegate, TriggeringContext.POST_COMMIT) - } - -PhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_DataflowV2_Java17', - 'Run Dataflow Runner V2 Java 17 Nexmark Tests', 'Dataflow Runner V2 Java 17 Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Dataflow runner V2 on Java 17 against a Pull Request, on demand.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - commonJob(delegate, TriggeringContext.PR) - } - -private void commonJob(delegate, TriggeringContext triggeringContext) { - def final JOB_SPECIFIC_OPTIONS = [ - 'influxTags' : '{\\\"runnerVersion\\\":\\\"V2\\\",\\\"javaVersion\\\":\\\"17\\\"}', - 'exportSummaryToBigQuery' : false, - 'region' : 'us-central1', - 'suite' : 'STRESS', - 'numWorkers' : 4, - 'maxNumWorkers' : 4, - 'autoscalingAlgorithm' : 'NONE', - 'nexmarkParallel' : 16, - 'enforceEncodability' : true, - 'enforceImmutability' : true - ] - - def final JOB_SPECIFIC_SWITCHES = [ - '-Pnexmark.runner.version="V2"' - ] - - Nexmark.nonQueryLanguageJobs(delegate, Runner.DATAFLOW, SDK.JAVA, JOB_SPECIFIC_OPTIONS, triggeringContext, JOB_SPECIFIC_SWITCHES, Nexmark.JAVA_17_RUNTIME_VERSION) -} diff --git a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Direct.groovy b/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Direct.groovy deleted file mode 100644 index c441c1e73e39..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Direct.groovy +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import CommonJobProperties as commonJobProperties -import CommonTestProperties.Runner -import CommonTestProperties.SDK -import CommonTestProperties.TriggeringContext -import NexmarkBuilder as Nexmark -import NoPhraseTriggeringPostCommitBuilder -import PhraseTriggeringPostCommitBuilder - -NoPhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_Direct', - 'Direct Runner Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Direct runner.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240, true, 'beam-perf') - - commonJob(delegate, TriggeringContext.POST_COMMIT) - } - -PhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_Direct', - 'Run Direct Runner Nexmark Tests', 'Direct Runner Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Direct runner against a Pull Request, on demand.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - commonJob(delegate, TriggeringContext.PR) - } - -private void commonJob(delegate, TriggeringContext triggeringContext) { - def final JOB_SPECIFIC_OPTIONS = [ - 'suite' : 'SMOKE', - 'enforceEncodability' : true, - 'enforceImmutability' : true - ] - Nexmark.standardJob(delegate, Runner.DIRECT, SDK.JAVA, JOB_SPECIFIC_OPTIONS, triggeringContext) -} diff --git a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Flink.groovy b/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Flink.groovy deleted file mode 100644 index cdbb2ab8afec..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Flink.groovy +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import CommonTestProperties -import CommonTestProperties.Runner -import CommonTestProperties.SDK -import CommonTestProperties.TriggeringContext -import NexmarkBuilder as Nexmark -import NoPhraseTriggeringPostCommitBuilder -import PhraseTriggeringPostCommitBuilder - -// This job runs the suite of ValidatesRunner tests against the Flink runner. 
-NoPhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_Flink', - 'Flink Runner Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Flink runner.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240, true, 'beam-perf', true, 40) - - commonJob(delegate, TriggeringContext.POST_COMMIT) - } - -PhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_Flink', - 'Run Flink Runner Nexmark Tests', 'Flink Runner Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Flink runner against a Pull Request, on demand.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240, true, 'beam', true, 40) - - commonJob(delegate, TriggeringContext.PR) - } - -private void commonJob(delegate, TriggeringContext triggeringContext) { - def final JOB_SPECIFIC_OPTIONS = [ - 'suite' : 'SMOKE', - 'streamTimeout' : 60, - ] - - Nexmark.standardJob(delegate, Runner.FLINK, SDK.JAVA, JOB_SPECIFIC_OPTIONS, triggeringContext) -} diff --git a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Spark.groovy b/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Spark.groovy deleted file mode 100644 index 7df873f63067..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Spark.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import CommonTestProperties.Runner -import CommonTestProperties.SDK -import CommonTestProperties.TriggeringContext -import NexmarkBuilder as Nexmark -import NoPhraseTriggeringPostCommitBuilder -import PhraseTriggeringPostCommitBuilder - -// This job runs the suite of ValidatesRunner tests against the Spark runner. -NoPhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_Spark', - 'Spark Runner Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Spark runner.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240, true, 'beam-perf') - - commonJob(delegate, TriggeringContext.POST_COMMIT) - } - -PhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Nexmark_Spark', - 'Run Spark Runner Nexmark Tests', 'Spark Runner Nexmark Tests', this) { - - description('Runs the Nexmark suite on the Spark runner against a Pull Request, on demand.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - commonJob(delegate, TriggeringContext.PR) - } - -private void commonJob(delegate, TriggeringContext triggeringContext) { - def final JOB_SPECIFIC_OPTIONS = [ - 'suite' : 'SMOKE', - 'streamTimeout': 60 - ] - // Spark doesn't run streaming jobs, therefore run only batch variants. 
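Because the Spark runner variant only exercises batch, the next lines call Nexmark.batchOnlyJob where the other runners call Nexmark.standardJob. A hypothetical outline of that split (the real methods live in NexmarkBuilder.groovy; the bodies below, including the runSuite helper, are assumptions):

    // Hypothetical outline; runSuite stands in for the builder's internal entry point.
    def standardJob(ctx, runner, sdk, Map opts, trigger) {
      runSuite(ctx, runner, sdk, opts + [streaming: false], trigger)   // batch
      runSuite(ctx, runner, sdk, opts + [streaming: true], trigger)    // streaming
    }
    def batchOnlyJob(ctx, runner, sdk, Map opts, trigger) {
      runSuite(ctx, runner, sdk, opts + [streaming: false], trigger)   // batch only
    }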
- Nexmark.batchOnlyJob(delegate, Runner.SPARK, SDK.JAVA, JOB_SPECIFIC_OPTIONS, triggeringContext) - - def final SPARK_STRUCTURED_STREAMING_JOB_SPECIFIC_OPTIONS = [ - 'suite' : 'SMOKE', - 'streamTimeout': 60, - // Skip query 3 (SparkStructuredStreamingRunner does not support State/Timers yet) - 'skipQueries' : 3, - ] - Nexmark.batchOnlyJob(delegate, Runner.SPARK_STRUCTURED_STREAMING, SDK.JAVA, SPARK_STRUCTURED_STREAMING_JOB_SPECIFIC_OPTIONS, triggeringContext) -} diff --git a/.test-infra/jenkins/job_PostCommit_Java_PortableValidatesRunner_Flink_Streaming.groovy b/.test-infra/jenkins/job_PostCommit_Java_PortableValidatesRunner_Flink_Streaming.groovy deleted file mode 100644 index fb3e24867f8c..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_PortableValidatesRunner_Flink_Streaming.groovy +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import CommonTestProperties -import PostcommitJobBuilder - -// This job runs the suite of ValidatesRunner tests against the Flink runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_PVR_Flink_Streaming', - 'Run Java Flink PortableValidatesRunner Streaming', 'Java Flink PortableValidatesRunner Streaming Tests', this) { - description('Runs the Java PortableValidatesRunner suite on the Flink runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(":runners:flink:${CommonTestProperties.getFlinkVersion()}:job-server:validatesPortableRunnerStreaming") - commonJobProperties.setGradleSwitches(delegate) - } - // TODO(https://github.com/apache/beam/issues/20668): Enable this test suite once we have support. - //gradle { - // rootBuildScriptDir(commonJobProperties.checkoutDir) - // tasks(":runners:flink:${CommonTestProperties.getFlinkVersion()}:job-server:validatesPortableRunnerStreamingCheckpoint") - // commonJobProperties.setGradleSwitches(delegate) - //} - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_PortableValidatesRunner_Samza.groovy b/.test-infra/jenkins/job_PostCommit_Java_PortableValidatesRunner_Samza.groovy deleted file mode 100644 index 881d4e1ca85f..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_PortableValidatesRunner_Samza.groovy +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the suite of Java ValidatesRunner tests against the Samza runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_PVR_Samza', - 'Run Java Samza PortableValidatesRunner', 'Java Samza PortableValidatesRunner Tests', this) { - description('Runs the Java PortableValidatesRunner suite on the Samza runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:samza:job-server:validatesPortableRunner') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_PortableValidatesRunner_Spark3_Streaming.groovy b/.test-infra/jenkins/job_PostCommit_Java_PortableValidatesRunner_Spark3_Streaming.groovy deleted file mode 100644 index 2164ab554b3b..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_PortableValidatesRunner_Spark3_Streaming.groovy +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the suite of Java ValidatesRunner tests against the Spark runner in streaming mode. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_PVR_Spark3_Streaming', - 'Run Java Spark v3 PortableValidatesRunner Streaming', 'Java Spark v3 PortableValidatesRunner Streaming Tests', this) { - description('Runs the Java PortableValidatesRunner suite on the Spark v3 runner in streaming mode.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. 
- steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:spark:3:job-server:validatesPortableRunnerStreaming') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_PortableValidatesRunner_Spark_Batch.groovy b/.test-infra/jenkins/job_PostCommit_Java_PortableValidatesRunner_Spark_Batch.groovy deleted file mode 100644 index ba08cc2e4404..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_PortableValidatesRunner_Spark_Batch.groovy +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the suite of Java ValidatesRunner tests against the Spark runner in batch mode. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_PVR_Spark_Batch', - 'Run Java Spark PortableValidatesRunner Batch', 'Java Spark PortableValidatesRunner Batch Tests', this) { - description('Runs the Java PortableValidatesRunner suite on the Spark runner in batch mode.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:spark:3:job-server:validatesPortableRunnerBatch') - tasks(':runners:spark:3:job-server:validatesPortableRunnerDocker') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Sickbay.groovy b/.test-infra/jenkins/job_PostCommit_Java_Sickbay.groovy deleted file mode 100644 index 40e6ac61fa83..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Sickbay.groovy +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the Java sickbay tests. -this.buildSchedule = 'H H * * *' -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_Sickbay', - 'Run Java Sickbay', 'Java Sickbay Tests', this) { - - description('Run Java Sickbay Tests') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120) - - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Execute shell command to run sickbay tests. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':javaPostCommitSickbay') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_SingleStoreIO_IT.groovy b/.test-infra/jenkins/job_PostCommit_Java_SingleStoreIO_IT.groovy deleted file mode 100644 index 901b76364333..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_SingleStoreIO_IT.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as common -import PostcommitJobBuilder -import Kubernetes - -String jobName = "beam_PostCommit_Java_SingleStoreIO_IT" - -void waitForPodWithLabel(job, Kubernetes k8s, String label) { - job.steps { - shell("${k8s.KUBERNETES_DIR}/singlestore/wait-for-pod-with-label.sh ${label} 600") - } -} - -void waitFor(job, Kubernetes k8s, String resource) { - job.steps { - shell("${k8s.KUBERNETES_DIR}/singlestore/wait-for.sh ${resource} 600") - } -} - - -// This job runs the integration test of java SingleStoreIO class. -PostcommitJobBuilder.postCommitJob(jobName, - 'Run Java SingleStoreIO_IT', 'Java SingleStoreIO Integration Test',this) { - description('Runs the Java SingleStoreIO Integration Test.') - - // Set common parameters. 
- common.setTopLevelMainJobProperties(delegate) - - // Deploy SingleStoreDB cluster - String namespace = common.getKubernetesNamespace(jobName) - String kubeconfigPath = common.getKubeconfigLocationForNamespace(namespace) - Kubernetes k8s = Kubernetes.create(delegate, kubeconfigPath, namespace) - - k8s.apply(common.makePathAbsolute("src/.test-infra/kubernetes/singlestore/sdb-rbac.yaml")) - k8s.apply(common.makePathAbsolute("src/.test-infra/kubernetes/singlestore/sdb-cluster-crd.yaml")) - k8s.apply(common.makePathAbsolute("src/.test-infra/kubernetes/singlestore/sdb-operator.yaml")) - waitForPodWithLabel(delegate, k8s, "sdb-operator") - - k8s.apply(common.makePathAbsolute("src/.test-infra/kubernetes/singlestore/sdb-cluster.yaml")) - waitFor(delegate, k8s, "memsqlclusters.memsql.com") - - String singlestoreHostName = "LOAD_BALANCER_IP" - k8s.loadBalancerIP("svc-sdb-cluster-ddl", singlestoreHostName) - - // Define test options - Map pipelineOptions = [ - tempRoot : 'gs://temp-storage-for-perf-tests', - project : 'apache-beam-testing', - runner : 'DataflowRunner', - singleStoreServerName : "\$${singlestoreHostName}", - singleStoreUsername : "admin", - singleStorePassword : "secretpass", - singleStorePort: "3306", - numberOfRecords: "1000", - ] - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(common.checkoutDir) - common.setGradleSwitches(delegate) - switches("--info") - switches("-DintegrationTestPipelineOptions=\'${common.joinPipelineOptions(pipelineOptions)}\'") - switches("-DintegrationTestRunner=dataflow") - tasks(":sdks:java:io:singlestore:integrationTest --tests org.apache.beam.sdk.io.singlestore.SingleStoreIODefaultMapperIT") - tasks(":sdks:java:io:singlestore:integrationTest --tests org.apache.beam.sdk.io.singlestore.SingleStoreIOSchemaTransformIT") - tasks(":sdks:java:io:singlestore:integrationTest --tests org.apache.beam.sdk.io.singlestore.SingleStoreIOConnectionAttributesIT") - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Tpcds_Dataflow.groovy b/.test-infra/jenkins/job_PostCommit_Java_Tpcds_Dataflow.groovy deleted file mode 100644 index f2152861be90..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Tpcds_Dataflow.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import NoPhraseTriggeringPostCommitBuilder -import PhraseTriggeringPostCommitBuilder -import InfluxDBCredentialsHelper - -import static TpcdsDatabaseProperties.tpcdsBigQueryArgs -import static TpcdsDatabaseProperties.tpcdsInfluxDBArgs -import static TpcdsDatabaseProperties.tpcdsQueriesArg - - -// This job runs the suite of Tpcds tests against the Dataflow runner. 
-NoPhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Tpcds_Dataflow', - 'Dataflow Runner Tpcds Tests', this) { - description('Runs the Tpcds suite on the Dataflow runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - InfluxDBCredentialsHelper.useCredentials(delegate) - - // Gradle goals for this job. - steps { - shell('echo "*** RUN TPC-DS USING DATAFLOW RUNNER ***"') - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:tpcds:run') - commonJobProperties.setGradleSwitches(delegate) - switches('-Ptpcds.runner=":runners:google-cloud-dataflow-java"' + - ' -Ptpcds.args="' + - [ - commonJobProperties.mapToArgString(tpcdsBigQueryArgs), - commonJobProperties.mapToArgString(tpcdsInfluxDBArgs), - '--runner=DataflowRunner', - '--region=us-central1', - '--numWorkers=4', - '--maxNumWorkers=4', - '--autoscalingAlgorithm=NONE', - '--dataSize=1GB', - '--sourceType=PARQUET', - '--dataDirectory=gs://beam-tpcds/datasets/parquet/nonpartitioned', - '--resultsDirectory=gs://beam-tpcds/results/dataflow/', - '--tpcParallel=1', - '--queries=' + tpcdsQueriesArg - ].join(' ')) - } - } - } - -PhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Tpcds_Dataflow', - 'Run Dataflow Runner Tpcds Tests', 'Dataflow Runner Tpcds Tests', this) { - - description('Runs the Tpcds suite on the Dataflow runner against a Pull Request, on demand.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - // Gradle goals for this job. - steps { - shell('echo "*** RUN TPC-DS USING DATAFLOW RUNNER ***"') - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:tpcds:run') - commonJobProperties.setGradleSwitches(delegate) - switches('-Ptpcds.runner=":runners:google-cloud-dataflow-java"' + - ' -Ptpcds.args="' + - [ - commonJobProperties.mapToArgString(tpcdsBigQueryArgs), - commonJobProperties.mapToArgString(tpcdsInfluxDBArgs), - '--runner=DataflowRunner', - '--region=us-central1', - '--numWorkers=4', - '--maxNumWorkers=4', - '--autoscalingAlgorithm=NONE', - '--dataSize=1GB', - '--sourceType=PARQUET', - '--dataDirectory=gs://beam-tpcds/datasets/parquet/nonpartitioned', - '--resultsDirectory=gs://beam-tpcds/results/dataflow/', - '--tpcParallel=1', - '--queries=' + tpcdsQueriesArg - ].join(' ')) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Tpcds_Flink.groovy b/.test-infra/jenkins/job_PostCommit_Java_Tpcds_Flink.groovy deleted file mode 100644 index b6bff8cfffd3..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Tpcds_Flink.groovy +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import CommonJobProperties as commonJobProperties -import NoPhraseTriggeringPostCommitBuilder -import PhraseTriggeringPostCommitBuilder -import InfluxDBCredentialsHelper - -import static TpcdsDatabaseProperties.tpcdsBigQueryArgs -import static TpcdsDatabaseProperties.tpcdsInfluxDBArgs -import static TpcdsDatabaseProperties.tpcdsQueriesArg - -// This job runs the Tpcds benchmark suite against the Flink runner. -NoPhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Tpcds_Flink', - 'Flink Runner Tpcds Tests', this) { - description('Runs the Tpcds suite on the Flink runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240, true, 'beam-perf') - InfluxDBCredentialsHelper.useCredentials(delegate) - - // Gradle goals for this job. - steps { - shell('echo "*** RUN TPC-DS USING FLINK RUNNER ***"') - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:tpcds:run') - commonJobProperties.setGradleSwitches(delegate) - switches('-Ptpcds.runner=":runners:flink:1.13"' + - ' -Ptpcds.args="' + - [ - commonJobProperties.mapToArgString(tpcdsBigQueryArgs), - commonJobProperties.mapToArgString(tpcdsInfluxDBArgs), - '--runner=FlinkRunner', - '--parallelism=4', - '--dataSize=1GB', - '--sourceType=PARQUET', - '--dataDirectory=gs://beam-tpcds/datasets/parquet/nonpartitioned', - '--resultsDirectory=gs://beam-tpcds/results/flink/', - '--tpcParallel=1', - '--queries=' + tpcdsQueriesArg - ].join(' ')) - } - } - } - -PhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Tpcds_Flink', - 'Run Flink Runner Tpcds Tests', 'Flink Runner Tpcds Tests', this) { - - description('Runs the Tpcds suite on the Flink runner against a Pull Request, on demand.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240, true, 'beam-perf') - InfluxDBCredentialsHelper.useCredentials(delegate) - - // Gradle goals for this job. - steps { - shell('echo "*** RUN TPC-DS USING FLINK RUNNER ***"') - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:tpcds:run') - commonJobProperties.setGradleSwitches(delegate) - switches('-Ptpcds.runner=":runners:flink:1.13"' + - ' -Ptpcds.args="' + - [ - commonJobProperties.mapToArgString(tpcdsBigQueryArgs), - commonJobProperties.mapToArgString(tpcdsInfluxDBArgs), - '--runner=FlinkRunner', - '--parallelism=4', - '--dataSize=1GB', - '--sourceType=PARQUET', - '--dataDirectory=gs://beam-tpcds/datasets/parquet/nonpartitioned', - '--resultsDirectory=gs://beam-tpcds/results/flink/', - '--tpcParallel=1', - '--queries=' + tpcdsQueriesArg - ].join(' ')) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_Tpcds_Spark.groovy b/.test-infra/jenkins/job_PostCommit_Java_Tpcds_Spark.groovy deleted file mode 100644 index 0bd28b232a9e..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_Tpcds_Spark.groovy +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import NoPhraseTriggeringPostCommitBuilder -import PhraseTriggeringPostCommitBuilder -import InfluxDBCredentialsHelper - -import static TpcdsDatabaseProperties.tpcdsBigQueryArgs -import static TpcdsDatabaseProperties.tpcdsInfluxDBArgs -import static TpcdsDatabaseProperties.tpcdsQueriesArg - -// This job runs the Tpcds benchmark suite against the Spark runner. -NoPhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Tpcds_Spark', - 'Spark Runner Tpcds Tests', this) { - description('Runs the Tpcds suite on the Spark runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240, true, 'beam-perf') - InfluxDBCredentialsHelper.useCredentials(delegate) - - // Gradle goals for this job. - steps { - shell('echo "*** RUN TPC-DS USING SPARK 3 RDD RUNNER ***"') - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:tpcds:run') - commonJobProperties.setGradleSwitches(delegate) - switches('-Ptpcds.runner=":runners:spark:3"' + - ' -Ptpcds.args="' + - [ - commonJobProperties.mapToArgString(tpcdsBigQueryArgs), - commonJobProperties.mapToArgString(tpcdsInfluxDBArgs), - '--runner=SparkRunner', - '--dataSize=1GB', - '--sourceType=PARQUET', - '--dataDirectory=gs://beam-tpcds/datasets/parquet/nonpartitioned', - '--resultsDirectory=gs://beam-tpcds/results/spark3-rdd/', - '--tpcParallel=1', - '--queries=' + tpcdsQueriesArg - ].join(' ')) - } - shell('echo "*** RUN TPC-DS USING SPARK 3 DATASET RUNNER ***"') - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:tpcds:run') - commonJobProperties.setGradleSwitches(delegate) - switches('-Ptpcds.runner=":runners:spark:3"' + - ' -Ptpcds.args="' + - [ - commonJobProperties.mapToArgString(tpcdsBigQueryArgs), - commonJobProperties.mapToArgString(tpcdsInfluxDBArgs), - '--runner=SparkStructuredStreamingRunner', - '--dataSize=1GB', - '--sourceType=PARQUET', - '--dataDirectory=gs://beam-tpcds/datasets/parquet/nonpartitioned', - '--resultsDirectory=gs://beam-tpcds/results/spark3-dataset/', - '--tpcParallel=1', - '--queries=' + tpcdsQueriesArg - ].join(' ')) - } - } - } - -PhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Java_Tpcds_Spark', - 'Run Spark Runner Tpcds Tests', 'Spark Runner Tpcds Tests', this) { - - description('Runs the Tpcds suite on the Spark runner against a Pull Request, on demand.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240, true, 'beam-perf') - InfluxDBCredentialsHelper.useCredentials(delegate) - - // Gradle goals for this job. 
- steps { - shell('echo "*** RUN TPC-DS USING SPARK 3 RDD RUNNER ***"') - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:tpcds:run') - commonJobProperties.setGradleSwitches(delegate) - switches('-Ptpcds.runner=":runners:spark:3"' + - ' -Ptpcds.args="' + - [ - commonJobProperties.mapToArgString(tpcdsBigQueryArgs), - commonJobProperties.mapToArgString(tpcdsInfluxDBArgs), - '--runner=SparkRunner', - '--dataSize=1GB', - '--sourceType=PARQUET', - '--dataDirectory=gs://beam-tpcds/datasets/parquet/nonpartitioned', - '--resultsDirectory=gs://beam-tpcds/results/spark3-rdd/', - '--tpcParallel=1', - '--queries=' + tpcdsQueriesArg - ].join(' ')) - } - shell('echo "*** RUN TPC-DS USING SPARK 3 DATASET RUNNER ***"') - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:testing:tpcds:run') - commonJobProperties.setGradleSwitches(delegate) - switches('-Ptpcds.runner=":runners:spark:3"' + - ' -Ptpcds.args="' + - [ - commonJobProperties.mapToArgString(tpcdsBigQueryArgs), - commonJobProperties.mapToArgString(tpcdsInfluxDBArgs), - '--runner=SparkStructuredStreamingRunner', - '--dataSize=1GB', - '--sourceType=PARQUET', - '--dataDirectory=gs://beam-tpcds/datasets/parquet/nonpartitioned', - '--resultsDirectory=gs://beam-tpcds/results/spark3-dataset/', - '--tpcParallel=1', - '--queries=' + tpcdsQueriesArg - ].join(' ')) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow.groovy deleted file mode 100644 index b0c0fa2c6e47..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow.groovy +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the suite of ValidatesRunner tests against the Dataflow -// runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_ValidatesRunner_Dataflow', - 'Run Dataflow ValidatesRunner', 'Google Cloud Dataflow Runner ValidatesRunner Tests (streaming/batch auto)', this) { - - description('Runs the ValidatesRunner suite on the Dataflow runner.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 480) - previousNames(/beam_PostCommit_Java_ValidatesRunner_Dataflow_Gradle/) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:validatesRunner') - // Increase parallel worker threads above processor limit since most time is - // spent waiting on Dataflow jobs. 
ValidatesRunner tests on Dataflow are slow - // because each one launches a Dataflow job with about 3 mins of overhead. - // 3 x num_cores strikes a good balance between maxing out parallelism without - // overloading the machines. - commonJobProperties.setGradleSwitches(delegate, 3 * Runtime.runtime.availableProcessors()) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_Java11.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_Java11.groovy deleted file mode 100644 index bb71f605cd4f..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_Java11.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_ValidatesRunner_Dataflow_Java11', - 'Run Dataflow ValidatesRunner Java 11', 'Google Cloud Dataflow Runner ValidatesRunner Tests On Java 11', this) { - - description('Runs the ValidatesRunner suite on the Dataflow runner with Java 11 worker harness.') - - def JAVA_11_HOME = '/usr/lib/jvm/java-11-openjdk-amd64' - def JAVA_8_HOME = '/usr/lib/jvm/java-8-openjdk-amd64' - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 480) - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:testJar') - tasks(':runners:google-cloud-dataflow-java:worker:shadowJar') - switches("-Dorg.gradle.java.home=${JAVA_8_HOME}") - } - - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:validatesRunner') - switches('-x shadowJar') - switches('-x shadowTestJar') - switches('-x compileJava') - switches('-x compileTestJava') - switches('-x jar') - switches('-x testJar') - switches('-x classes') - switches('-x testClasses') - switches("-Dorg.gradle.java.home=${JAVA_11_HOME}") - - commonJobProperties.setGradleSwitches(delegate, 3 * Runtime.runtime.availableProcessors()) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_Java17.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_Java17.groovy deleted file mode 100644 index 7bfcc256619f..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_Java17.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_ValidatesRunner_Dataflow_Java17', - 'Run Dataflow ValidatesRunner Java 17', 'Google Cloud Dataflow Runner ValidatesRunner Tests On Java 17', this) { - - description('Runs the ValidatesRunner suite on the Dataflow runner with Java 17 worker harness.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 480) - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:testJar') - tasks(':runners:google-cloud-dataflow-java:worker:shadowJar') - switches("-Dorg.gradle.java.home=${commonJobProperties.JAVA_8_HOME}") - } - - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:validatesRunner') - switches('-x shadowJar') - switches('-x shadowTestJar') - switches('-x compileJava') - switches('-x compileTestJava') - switches('-x jar') - switches('-x testJar') - switches('-x classes') - switches('-x testClasses') - switches("-Dorg.gradle.java.home=${commonJobProperties.JAVA_17_HOME}") - - commonJobProperties.setGradleSwitches(delegate, 3 * Runtime.runtime.availableProcessors()) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.groovy deleted file mode 100644 index 5d5c060175b9..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_Streaming.groovy +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the suite of ValidatesRunner tests against the Dataflow -// runner. 
-PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_ValidatesRunner_Dataflow_Streaming', - 'Run Dataflow Streaming ValidatesRunner', 'Google Cloud Dataflow Runner ValidatesRunner Tests (streaming)', this) { - - description('Runs the ValidatesRunner suite on the Dataflow runner forcing streaming mode.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 720) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:validatesRunnerStreaming') - // Increase parallel worker threads above processor limit since most time is - // spent waiting on Dataflow jobs. ValidatesRunner tests on Dataflow are slow - // because each one launches a Dataflow job with about 3 mins of overhead. - // 3 x num_cores strikes a good balance between maxing out parallelism without - // overloading the machines. - commonJobProperties.setGradleSwitches(delegate, 3 * Runtime.runtime.availableProcessors()) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_V2.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_V2.groovy deleted file mode 100644 index abb3c9f5b442..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_V2.groovy +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the suite of ValidatesRunner tests against the Dataflow -// runner V2. -this.buildSchedule = 'H H/8 * * *' -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_VR_Dataflow_V2', - 'Run Java Dataflow V2 ValidatesRunner', 'Google Cloud Dataflow Runner V2 Java ValidatesRunner Tests', this) { - - description('Runs Java ValidatesRunner suite on the Dataflow runner V2.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 390) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:validatesRunnerV2') - // Increase parallel worker threads above processor limit since most time is - // spent waiting on Dataflow jobs. ValidatesRunner tests on Dataflow are slow - // because each one launches a Dataflow job with about 3 mins of overhead. - // 3 x num_cores strikes a good balance between maxing out parallelism without - // overloading the machines. 
- commonJobProperties.setGradleSwitches(delegate, 3 * Runtime.runtime.availableProcessors()) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.groovy deleted file mode 100644 index 3c4b5bf3a750..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Dataflow_V2_Streaming.groovy +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the suite of ValidatesRunner tests against the Dataflow -// runner V2. -this.buildSchedule = 'H H/8 * * *' -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_VR_Dataflow_V2_Streaming', - 'Run Java Dataflow V2 ValidatesRunner Streaming', 'Google Cloud Dataflow Runner V2 Java ValidatesRunner Tests (streaming)', this) { - - description('Runs Java ValidatesRunner suite on the Dataflow runner V2 forcing streaming mode.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 510) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:google-cloud-dataflow-java:validatesRunnerV2Streaming') - // Increase parallel worker threads above processor limit since most time is - // spent waiting on Dataflow jobs. ValidatesRunner tests on Dataflow are slow - // because each one launches a Dataflow job with about 3 mins of overhead. - // 3 x num_cores strikes a good balance between maxing out parallelism without - // overloading the machines. - commonJobProperties.setGradleSwitches(delegate, 3 * Runtime.runtime.availableProcessors()) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Direct.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Direct.groovy deleted file mode 100644 index 13173390be42..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Direct.groovy +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the suite of ValidatesRunner tests against the Direct -// runner compiled with Java 8. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_ValidatesRunner_Direct', - 'Run Direct ValidatesRunner', 'Direct Runner ValidatesRunner Tests for Java', this) { - - description('Builds the Direct Runner and runs ValidatesRunner test suite in Java.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 180) - - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:direct-java:validatesRunner') - } - } - - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Direct_Java11.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Direct_Java11.groovy deleted file mode 100644 index ee9a99d4d834..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Direct_Java11.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the suite of ValidatesRunner tests using Java 11 against the Direct -// runner compiled with Java 8. 
-PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_ValidatesRunner_Direct_Java11', - 'Run Direct ValidatesRunner Java 11', 'Direct Runner ValidatesRunner Tests for Java 11', this) { - - description('Builds the Direct Runner with Java 8 and runs ValidatesRunner test suite in Java 11.') - - def JAVA_11_HOME = '/usr/lib/jvm/java-11-openjdk-amd64' - def JAVA_8_HOME = '/usr/lib/jvm/java-8-openjdk-amd64' - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 180) - - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:direct-java:shadowJar') - tasks(':runners:direct-java:shadowTestJar') - switches("-Dorg.gradle.java.home=${JAVA_8_HOME}") - } - - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:direct-java:validatesRunner') - switches("-Dorg.gradle.java.home=${JAVA_11_HOME}") - switches('-x shadowJar') - switches('-x shadowTestJar') - switches('-x compileJava') - switches('-x compileTestJava') - } - } - - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Direct_Java17.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Direct_Java17.groovy deleted file mode 100644 index 50681914cd5c..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Direct_Java17.groovy +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the suite of ValidatesRunner tests using Java 17 against the Direct -// runner compiled with Java 8. 
-PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_ValidatesRunner_Direct_Java17', - 'Run Direct ValidatesRunner Java 17', 'Direct Runner ValidatesRunner Tests for Java 17', this) { - - description('Builds the Direct Runner with Java 8 and runs ValidatesRunner test suite in Java 17.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 180) - - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:direct-java:shadowJar') - tasks(':runners:direct-java:shadowTestJar') - switches("-Dorg.gradle.java.home=${commonJobProperties.JAVA_8_HOME}") - } - - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:direct-java:validatesRunner') - switches("-Dorg.gradle.java.home=${commonJobProperties.JAVA_17_HOME}") - switches('-x shadowJar') - switches('-x shadowTestJar') - switches('-x compileJava') - switches('-x compileTestJava') - } - } - - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Samza.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Samza.groovy deleted file mode 100644 index 44231c2b07ec..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Samza.groovy +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the suite of ValidatesRunner tests against the Samza runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_ValidatesRunner_Samza', - 'Run Samza ValidatesRunner', 'Apache Samza Runner ValidatesRunner Tests', this) { - description('Runs the ValidatesRunner suite on the Samza runner.') - previousNames(/beam_PostCommit_Java_ValidatesRunner_Samza_Gradle/) - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:samza:validatesRunner') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Spark.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Spark.groovy deleted file mode 100644 index 0cb6bd2d3710..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Spark.groovy +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the suite of ValidatesRunner tests against the Spark runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_ValidatesRunner_Spark', - 'Run Spark ValidatesRunner', 'Apache Spark Runner ValidatesRunner Tests', this) { - description('Runs the ValidatesRunner suite on the Spark runner.') - previousNames(/beam_PostCommit_Java_ValidatesRunner_Spark_Gradle/) - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:spark:3:validatesRunner') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.groovy deleted file mode 100644 index 31d5d5cf07b4..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming.groovy +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the suite of ValidatesRunner tests against the Spark Structured Streaming runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_ValidatesRunner_SparkStructuredStreaming', - 'Run Spark StructuredStreaming ValidatesRunner', 'Apache Spark SparkStructuredStreaming Runner ValidatesRunner Tests', this) { - description('Runs the ValidatesRunner suite on the Structured Streaming Spark runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job.
- steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:spark:3:validatesStructuredStreamingRunnerBatch') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Spark_Java11.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Spark_Java11.groovy deleted file mode 100644 index d0da52927b8a..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Spark_Java11.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_ValidatesRunner_Spark_Java11', - 'Run Spark ValidatesRunner Java 11', 'Apache Spark Runner ValidatesRunner Tests On Java 11', this) { - - description('Runs the ValidatesRunner suite on the Spark runner with Java 11.') - - def JAVA_11_HOME = '/usr/lib/jvm/java-11-openjdk-amd64' - def JAVA_8_HOME = '/usr/lib/jvm/java-8-openjdk-amd64' - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 270) - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:spark:3:jar') - tasks(':runners:spark:3:testJar') - switches("-Dorg.gradle.java.home=${JAVA_8_HOME}") - } - - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:spark:3:validatesRunner') - switches('-x shadowJar') - switches('-x shadowTestJar') - switches('-x compileJava') - switches('-x compileTestJava') - switches('-x jar') - switches('-x testJar') - switches('-x classes') - switches('-x testClasses') - switches("-Dorg.gradle.java.home=${JAVA_11_HOME}") - - commonJobProperties.setGradleSwitches(delegate, 3 * Runtime.runtime.availableProcessors()) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Twister2.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Twister2.groovy deleted file mode 100644 index fa33ded11187..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_Twister2.groovy +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the suite of ValidatesRunner tests against the Twister2 runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_ValidatesRunner_Twister2', - 'Run Twister2 ValidatesRunner', 'Twister2 Runner ValidatesRunner Tests', this) { - description('Runs the ValidatesRunner suite on the Twister2 runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - previousNames(/beam_PostCommit_Java_ValidatesRunner_Twister2_Gradle/) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:twister2:validatesRunner') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_ULR_LOOPBACK.groovy b/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_ULR_LOOPBACK.groovy deleted file mode 100644 index 855e8ac5400e..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Java_ValidatesRunner_ULR_LOOPBACK.groovy +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -// This job runs the suite of ValidatesRunner tests against the Direct -// runner compiled with Java 8. 
-PostcommitJobBuilder.postCommitJob('beam_PostCommit_Java_ValidatesRunner_ULR', - 'Run ULR Loopback ValidatesRunner', 'Universal Local Runner ValidatesRunner Tests for Java, LOOPBACK mode', this) { - - description('Builds the Universal Local Runner and runs the Java ValidatesRunner test suite in LOOPBACK mode (no Docker).') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 180) - - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':runners:portability:java:ulrLoopbackValidatesRunner') - } - } - - } diff --git a/.test-infra/jenkins/job_PostCommit_Javadoc.groovy b/.test-infra/jenkins/job_PostCommit_Javadoc.groovy deleted file mode 100644 index ae2ab53bd3fe..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Javadoc.groovy +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Javadoc', 'Run Javadoc PostCommit', - 'Javadoc generation', this) { - - description('PostCommit job that generates aggregated Javadoc') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - publishers { - archiveJavadoc { - javadocDir("src/sdks/java/javadoc/build/docs/javadoc") - keepAll(false) - } - recordIssues { - tools { - java() - javaDoc() - } - enabledForFailure(true) - } - } - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:java:javadoc:aggregateJavadoc') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_PortableJar_Flink.groovy b/.test-infra/jenkins/job_PostCommit_PortableJar_Flink.groovy deleted file mode 100644 index 0c6f51f8be54..000000000000 --- a/.test-infra/jenkins/job_PostCommit_PortableJar_Flink.groovy +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// Tests creation and execution of portable pipeline Jars on the Flink runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_PortableJar_Flink', - 'Run PortableJar_Flink PostCommit', 'Flink Portable Jar Tests', this) { - description('Tests creation and execution of portable pipeline Jars on the Flink runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120) - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:python:test-suites:portable:py38:testPipelineJarFlinkRunner') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_PortableJar_Spark.groovy b/.test-infra/jenkins/job_PostCommit_PortableJar_Spark.groovy deleted file mode 100644 index 1f1963a9b2e4..000000000000 --- a/.test-infra/jenkins/job_PostCommit_PortableJar_Spark.groovy +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// Tests creation and execution of portable pipeline Jars on the Spark runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_PortableJar_Spark', - 'Run PortableJar_Spark PostCommit', 'Spark Portable Jar Tests', this) { - description('Tests creation and execution of portable pipeline Jars on the Spark runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:python:test-suites:portable:py38:testPipelineJarSparkRunner') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Python_Chicago_Taxi_Example_Dataflow.groovy b/.test-infra/jenkins/job_PostCommit_Python_Chicago_Taxi_Example_Dataflow.groovy deleted file mode 100644 index bd04d3e0c3a5..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Python_Chicago_Taxi_Example_Dataflow.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PhraseTriggeringPostCommitBuilder -import CronJobBuilder -import LoadTestsBuilder - -def chicagoTaxiJob = { scope -> - scope.description('Runs the Chicago Taxi Example on the Dataflow runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(scope) - - def pipelineOptions = [ - num_workers : 5, - autoscaling_algorithm: 'NONE', - ] - - // Gradle goals for this job. - scope.steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - commonJobProperties.setGradleSwitches(delegate) - tasks(':sdks:python:test-suites:dataflow:chicagoTaxiExample') - switches('-PgcsRoot=gs://temp-storage-for-perf-tests/chicago-taxi') - switches("-PpipelineOptions=\"${LoadTestsBuilder.parseOptions(pipelineOptions)}\"") - } - } -} - -PhraseTriggeringPostCommitBuilder.postCommitJob( - 'beam_PostCommit_Python_Chicago_Taxi_Dataflow', - 'Run Chicago Taxi on Dataflow', - 'Chicago Taxi Example on Dataflow ("Run Chicago Taxi on Dataflow")', - this - ) { - chicagoTaxiJob(delegate) - } - -// TODO(https://github.com/apache/beam/issues/19973): Chicago Taxi Example doesn't work in Python 3. -// Uncomment below once it is fixed. -// -// CronJobBuilder.cronJob( -// 'beam_PostCommit_Python_Chicago_Taxi_Dataflow', -// 'H H * * *', -// this -// ) { -// chicagoTaxiJob(delegate) -// } diff --git a/.test-infra/jenkins/job_PostCommit_Python_Chicago_Taxi_Example_Flink.groovy b/.test-infra/jenkins/job_PostCommit_Python_Chicago_Taxi_Example_Flink.groovy deleted file mode 100644 index 6cf852a16893..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Python_Chicago_Taxi_Example_Flink.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import CommonJobProperties as commonJobProperties -import CronJobBuilder -import Flink -import LoadTestsBuilder -import PhraseTriggeringPostCommitBuilder - -import static LoadTestsBuilder.DOCKER_BEAM_JOBSERVER -import static LoadTestsBuilder.DOCKER_CONTAINER_REGISTRY -import static PythonTestProperties.CHICAGO_TAXI_EXAMPLE_FLINK_PYTHON_VERSION - -def chicagoTaxiJob = { scope -> - scope.description('Runs the Chicago Taxi Example on the Flink runner.') - commonJobProperties.setTopLevelMainJobProperties(scope, 'master', 120) - - def numberOfWorkers = 5 - def beamSdkDockerImage = "beam_python${CHICAGO_TAXI_EXAMPLE_FLINK_PYTHON_VERSION}_sdk:latest" - - Flink flink = new Flink(scope, 'beam_PostCommit_Python_Chicago_Taxi_Flink') - flink.setUp( - [ - "${DOCKER_CONTAINER_REGISTRY}/${beamSdkDockerImage}" - ], - numberOfWorkers, - "${DOCKER_BEAM_JOBSERVER}/beam_flink${CommonTestProperties.getFlinkVersion()}_job_server:latest") - - def pipelineOptions = [ - parallelism : numberOfWorkers, - job_endpoint : 'localhost:8099', - environment_options : "docker_container_image=${DOCKER_CONTAINER_REGISTRY}/${beamSdkDockerImage}", - environment_type : 'DOCKER', - execution_mode_for_batch: 'BATCH_FORCED', - ] - - scope.steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(":sdks:python:test-suites:portable:py${CHICAGO_TAXI_EXAMPLE_FLINK_PYTHON_VERSION.replace('.', '')}:chicagoTaxiExample") - switches('-PgcsRoot=gs://temp-storage-for-perf-tests/chicago-taxi') - switches("-PpipelineOptions=\"${LoadTestsBuilder.parseOptions(pipelineOptions)}\"") - } - } -} - -PhraseTriggeringPostCommitBuilder.postCommitJob( - 'beam_PostCommit_Python_Chicago_Taxi_Flink', - 'Run Chicago Taxi on Flink', - 'Chicago Taxi Example on Flink ("Run Chicago Taxi on Flink")', - this - ) { - chicagoTaxiJob(delegate) - } - -// TODO(https://github.com/apache/beam/issues/19973): Chicago Taxi Example doesn't work in Python 3. -// Uncomment below once it is fixed. -// -// CronJobBuilder.cronJob( -// 'beam_PostCommit_Python_Chicago_Taxi_Flink', -// 'H H * * *', -// this -// ) { -// chicagoTaxiJob(delegate) -// } diff --git a/.test-infra/jenkins/job_PostCommit_Python_CrossLanguage_Gcp_Dataflow.groovy b/.test-infra/jenkins/job_PostCommit_Python_CrossLanguage_Gcp_Dataflow.groovy deleted file mode 100644 index 1280fcb4e233..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Python_CrossLanguage_Gcp_Dataflow.groovy +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -import static PythonTestProperties.CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS - - -// This job runs end-to-end cross language GCP IO tests with DataflowRunner. 
-// Collects tests with the @pytest.mark.uses_gcp_java_expansion_service decorator -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Python_Xlang_Gcp_Dataflow', - 'Run Python_Xlang_Gcp_Dataflow PostCommit', 'Python_Xlang_Gcp_Dataflow (\"Run Python_Xlang_Gcp_Dataflow PostCommit\")', this) { - description('Runs end-to-end cross language GCP IO tests on the Dataflow runner. \"Run Python_Xlang_Gcp_Dataflow PostCommit\"') - - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 180) - - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/pytest*.xml') - } - - - // Gradle goals for this job. - - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(":sdks:python:test-suites:dataflow:gcpCrossLanguagePostCommit") - commonJobProperties.setGradleSwitches(delegate) - switches("-PuseWheelDistribution") - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Python_CrossLanguage_Gcp_Direct.groovy b/.test-infra/jenkins/job_PostCommit_Python_CrossLanguage_Gcp_Direct.groovy deleted file mode 100644 index e4bf771be1ae..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Python_CrossLanguage_Gcp_Direct.groovy +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - - -import static PythonTestProperties.CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS - - -// This job runs end-to-end cross language GCP IO tests with DirectRunner. -// Collects tests with the @pytest.mark.uses_gcp_java_expansion_service decorator -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Python_Xlang_Gcp_Direct', - 'Run Python_Xlang_Gcp_Direct PostCommit', 'Python_Xlang_Gcp_Direct (\"Run Python_Xlang_Gcp_Direct PostCommit\")', this) { - description('Runs end-to-end cross language GCP IO tests on the Direct runner. \"Run Python_Xlang_Gcp_Direct PostCommit\"') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/pytest*.xml') - } - - // Gradle goals for this job. 
- steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(":sdks:python:test-suites:direct:gcpCrossLanguagePostCommit") - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Python_Examples_Dataflow.groovy b/.test-infra/jenkins/job_PostCommit_Python_Examples_Dataflow.groovy deleted file mode 100644 index 8f91f8102edc..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Python_Examples_Dataflow.groovy +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the Python examples tests with DataflowRunner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Python_Examples_Dataflow', - 'Run Python Examples_Dataflow', 'Python Dataflow Runner Examples', this) { - - description('Runs the Python Examples with DataflowRunner') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 180) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/pytest*.xml') - } - - // Execute shell command to run examples. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(":sdks:python:test-suites:dataflow:examplesPostCommit") - commonJobProperties.setGradleSwitches(delegate) - switches("-PuseWheelDistribution") - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Python_Examples_Spark.groovy b/.test-infra/jenkins/job_PostCommit_Python_Examples_Spark.groovy deleted file mode 100644 index 6277106c8cbc..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Python_Examples_Spark.groovy +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the Python examples tests with SparkRunner. 
-PostcommitJobBuilder.postCommitJob('beam_PostCommit_Python_Examples_Spark', - 'Run Python Examples_Spark', 'Python Spark Runner Examples', this) { - - description('Runs the Python Examples with Spark Runner') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/pytest*.xml') - } - - // Execute shell command to run examples. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(":sdks:python:test-suites:portable:sparkExamplesPostCommit") - commonJobProperties.setGradleSwitches(delegate) - } - } - } - - diff --git a/.test-infra/jenkins/job_PostCommit_Python_MongoDBIO_Load_Test.groovy b/.test-infra/jenkins/job_PostCommit_Python_MongoDBIO_Load_Test.groovy deleted file mode 100644 index 9a66b51e5423..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Python_MongoDBIO_Load_Test.groovy +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import CommonJobProperties as common -import Kubernetes - -String jobName = "beam_python_mongoio_load_test" - -job(jobName) { - common.setTopLevelMainJobProperties(delegate) - common.setAutoJob(delegate, 'H H/12 * * *') - common.enablePhraseTriggeringFromPullRequest( - delegate, - 'Python MongoDBIO Load Test', - 'Run Python MongoDBIO Load Test') - - String namespace = common.getKubernetesNamespace(jobName) - String kubeconfigPath = common.getKubeconfigLocationForNamespace(namespace) - Kubernetes k8s = Kubernetes.create(delegate, kubeconfigPath, namespace) - - k8s.apply(common.makePathAbsolute("src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml")) - String mongoHostName = "LOAD_BALANCER_IP" - k8s.loadBalancerIP("mongo-load-balancer-service", mongoHostName) - - Map pipelineOptions = [ - temp_location: 'gs://temp-storage-for-perf-tests/loadtests', - project : 'apache-beam-testing', - region : 'us-central1', - mongo_uri : "mongodb://root:uuinkkS@\$${mongoHostName}:27017", - num_documents: '1000000', - batch_size : '10000', - runner : 'DataflowRunner', - num_workers : '5' - ] - - steps { - gradle { - rootBuildScriptDir(common.checkoutDir) - common.setGradleSwitches(delegate) - switches("-Popts=\'${common.mapToArgString(pipelineOptions)}\'") - tasks(":sdks:python:test-suites:dataflow:mongodbioIT") - } - } -} diff --git a/.test-infra/jenkins/job_PostCommit_Python_Nexmark_Direct.groovy b/.test-infra/jenkins/job_PostCommit_Python_Nexmark_Direct.groovy deleted file mode 100644 index 6927dbe49cdb..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Python_Nexmark_Direct.groovy +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import CommonTestProperties.Runner -import CommonTestProperties.SDK -import CommonTestProperties.TriggeringContext -import NexmarkBuilder as Nexmark -import NoPhraseTriggeringPostCommitBuilder -import PhraseTriggeringPostCommitBuilder - -NoPhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Python_Nexmark_Direct', - 'Python Direct Runner Nexmark Tests', this) { - - description('Runs the Python Nexmark suite on the Direct runner.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240, true, 'beam-perf') - - commonJob(delegate, TriggeringContext.POST_COMMIT) - } - -PhraseTriggeringPostCommitBuilder.postCommitJob('beam_PostCommit_Python_Nexmark_Direct', - 'Run Python Direct Runner Nexmark Tests', 'Python Direct Runner Nexmark Tests', this) { - - description('Runs the Python Nexmark suite on the Direct runner against a Pull Request, on demand.') - - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - commonJob(delegate, TriggeringContext.PR) - } - -private void commonJob(delegate, TriggeringContext triggeringContext) { - def final JOB_SPECIFIC_OPTIONS = [ - 'suite' : 'SMOKE', - 'enforceEncodability' : true, - 'enforceImmutability' : true - ] - Nexmark.standardPythonJob(delegate, Runner.DIRECT, SDK.PYTHON, JOB_SPECIFIC_OPTIONS, triggeringContext) -} diff --git a/.test-infra/jenkins/job_PostCommit_Python_Sickbay.groovy b/.test-infra/jenkins/job_PostCommit_Python_Sickbay.groovy deleted file mode 100644 index f9504ee811f3..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Python_Sickbay.groovy +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -import static PythonTestProperties.ALL_SUPPORTED_VERSIONS - -// This job defines the Python postcommit sickbay tests. 
-this.buildSchedule = 'H H * * *' -ALL_SUPPORTED_VERSIONS.each { pythonVersion -> - def versionSuffix = pythonVersion.replace('.', '') - PostcommitJobBuilder.postCommitJob("beam_PostCommit_Sickbay_Python${versionSuffix}", - "Run Python ${pythonVersion} PostCommit Sickbay", - "Python${versionSuffix}_PC_Sickbay(\"Run Python ${pythonVersion} PostCommit Sickbay\")", this) { - description("Runs Python postcommit sickbay tests using Python ${pythonVersion}.") - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 180) - - publishers { - archiveJunit('**/pytest*.xml') - } - - // Execute shell command to test Python SDK. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(":sdks:python:test-suites:dataflow:py${versionSuffix}:postCommitSickbay") - commonJobProperties.setGradleSwitches(delegate) - } - } - } -} \ No newline at end of file diff --git a/.test-infra/jenkins/job_PostCommit_Python_ValidatesContainer_Dataflow.groovy b/.test-infra/jenkins/job_PostCommit_Python_ValidatesContainer_Dataflow.groovy deleted file mode 100644 index 254096448668..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Python_ValidatesContainer_Dataflow.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -import static PythonTestProperties.VALIDATES_CONTAINER_DATAFLOW_PYTHON_VERSIONS - -// This job runs the suite of Python ValidatesContainer tests against the -// Dataflow runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Py_ValCont', - 'Run Python Dataflow ValidatesContainer', 'Google Cloud Dataflow Runner Python ValidatesContainer Tests', this) { - description('Runs Python ValidatesContainer suite on the Dataflow runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - - publishers { - archiveJunit('**/pytest*.xml') - } - - // Execute shell command to test Python SDK. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:python:test-suites:dataflow:validatesContainerTests') - commonJobProperties.setGradleSwitches(delegate) - } - } - } - -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Py_ValCont_with_RC', - 'Run Python RC Dataflow ValidatesContainer', 'Google Cloud Dataflow Runner Python ValidatesContainer Tests with RC Dependencies', this) { - description('Runs Python ValidatesContainer suite on the Dataflow runner by installing Release Candidates.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - - publishers { - archiveJunit('**/pytest*.xml') - } - - // Execute shell command to test Python SDK. 
- steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:python:test-suites:dataflow:validatesContainerTests') - switches('-PtestRCDependencies=true') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Python_ValidatesRunner_Dataflow.groovy b/.test-infra/jenkins/job_PostCommit_Python_ValidatesRunner_Dataflow.groovy deleted file mode 100644 index db052a0046ce..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Python_ValidatesRunner_Dataflow.groovy +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the suite of Python ValidatesRunner tests against the -// Dataflow runner V2. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Py_VR_Dataflow', 'Run Python Dataflow ValidatesRunner', - 'Google Cloud Dataflow Runner Python ValidatesRunner Tests', this) { - description('Runs Python ValidatesRunner suite on the Dataflow runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 200) - - publishers { - archiveJunit('**/pytest*.xml') - } - - // Execute gradle task to test Python SDK. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:python:test-suites:dataflow:validatesRunnerBatchTests') - tasks(':sdks:python:test-suites:dataflow:validatesRunnerStreamingTests') - switches('-PuseWheelDistribution') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_Python_ValidatesRunner_Samza.groovy b/.test-infra/jenkins/job_PostCommit_Python_ValidatesRunner_Samza.groovy deleted file mode 100644 index c49e3677ab8a..000000000000 --- a/.test-infra/jenkins/job_PostCommit_Python_ValidatesRunner_Samza.groovy +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the suite of Python ValidatesRunner tests against the Samza runner. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_Python_VR_Samza', - 'Run Python Samza ValidatesRunner', 'Python Samza ValidatesRunner Tests', this) { - description('Runs the Python ValidatesRunner suite on the Samza runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate) - - // Publish all test results to Jenkins. - publishers { - archiveJunit('**/pytest*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:python:test-suites:portable:samzaValidatesRunner') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_SQL.groovy b/.test-infra/jenkins/job_PostCommit_SQL.groovy deleted file mode 100644 index 9207cdbae17d..000000000000 --- a/.test-infra/jenkins/job_PostCommit_SQL.groovy +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -// This job runs the Java postcommit tests, including the suite of integration -// tests. -PostcommitJobBuilder.postCommitJob('beam_PostCommit_SQL', 'Run SQL PostCommit', - 'SQL Post Commit Tests', this) { - - description('Runs PostCommit tests for Beam SQL.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 240) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/build/test-results/**/*.xml') - } - - // Gradle goals for this job. - steps { - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sqlPostCommit') - commonJobProperties.setGradleSwitches(delegate) - } - } - } diff --git a/.test-infra/jenkins/job_PostCommit_TransformService_Direct.groovy b/.test-infra/jenkins/job_PostCommit_TransformService_Direct.groovy deleted file mode 100644 index 03d29069a52c..000000000000 --- a/.test-infra/jenkins/job_PostCommit_TransformService_Direct.groovy +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CommonJobProperties as commonJobProperties -import PostcommitJobBuilder - -import static PythonTestProperties.CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS - -// This job runs multi-language pipelines using the Docker Compose based TransformService against the Direct runner. -// Collects tests with the @pytest.mark.uses_transform_service decorator -PostcommitJobBuilder.postCommitJob('beam_PostCommit_TransformService_Direct', - 'Run TransformService_Direct PostCommit', 'Direct TransformService Tests', this) { - description('Runs the TransformService suite on the Direct runner.') - - // Set common parameters. - commonJobProperties.setTopLevelMainJobProperties(delegate, 'master', 120) - - // Publish all test results to Jenkins - publishers { - archiveJunit('**/pytest*.xml') - } - - // Gradle goals for this job. - steps { - CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS.each { pythonVersion -> - shell("echo \"*** RUN TRANSFORM SERVICE Python SDK TESTS USING THE DIRECT RUNNER AND THE PYTHON VERSION ${pythonVersion} ***\"") - gradle { - rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:python:test-suites:direct:xlang:transformServicePythonUsingJava') - commonJobProperties.setGradleSwitches(delegate) - switches '-PtestJavaVersion=11' - switches "-Pjava11Home=${commonJobProperties.JAVA_11_HOME}" - switches("-PuseWheelDistribution") - switches("-PpythonVersion=${pythonVersion}") - } - } - } - } diff --git a/.test-infra/jupyter/README.md b/.test-infra/jupyter/README.md index fc0a5c87f027..dfb1b95c619c 100644 --- a/.test-infra/jupyter/README.md +++ b/.test-infra/jupyter/README.md @@ -37,4 +37,4 @@ jupyter notebook # Should open a browser window. # Pull Requests To minimize file size, diffs, and ease reviews, please clear all cell output -(cell -> all output -> clear) before commiting. +(cell -> all output -> clear) before committing. 
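For reference, the notebook-output cleanup that the jupyter README above asks for can also be scripted rather than done through the notebook menu. A minimal sketch, assuming the `jupyter nbconvert` CLI that ships with the Jupyter tooling is on PATH; the notebook path is a placeholder:

```sh
# Clear all cell outputs in place before committing; --clear-output is
# nbconvert's switch for enabling its ClearOutputPreprocessor, and is the
# CLI equivalent of the menu action (cell -> all output -> clear).
jupyter nbconvert --clear-output --inplace path/to/notebook.ipynb
```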
diff --git a/.test-infra/mock-apis/go.mod b/.test-infra/mock-apis/go.mod index cc65cfbaac76..306d9f53ee07 100644 --- a/.test-infra/mock-apis/go.mod +++ b/.test-infra/mock-apis/go.mod @@ -45,12 +45,12 @@ require ( github.com/googleapis/enterprise-certificate-proxy v0.2.4 // indirect github.com/googleapis/gax-go/v2 v2.12.0 // indirect go.opencensus.io v0.24.0 // indirect - golang.org/x/crypto v0.14.0 // indirect + golang.org/x/crypto v0.17.0 // indirect golang.org/x/net v0.17.0 // indirect golang.org/x/oauth2 v0.12.0 // indirect golang.org/x/sync v0.3.0 // indirect - golang.org/x/sys v0.13.0 // indirect - golang.org/x/text v0.13.0 // indirect + golang.org/x/sys v0.15.0 // indirect + golang.org/x/text v0.14.0 // indirect google.golang.org/api v0.128.0 // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20230803162519-f966b187b2e5 // indirect diff --git a/.test-infra/mock-apis/go.sum b/.test-infra/mock-apis/go.sum index a928e3dae2f1..13ca0c274e93 100644 --- a/.test-infra/mock-apis/go.sum +++ b/.test-infra/mock-apis/go.sum @@ -101,8 +101,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= -golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= +golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= +golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= @@ -144,8 +144,8 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= -golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -154,8 +154,8 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.13.0 
h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
-golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
+golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
+golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
diff --git a/CHANGES.md b/CHANGES.md
index 9318e85d477b..f59ee274343c 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -53,7 +53,7 @@
 * ([#X](https://github.com/apache/beam/issues/X)).
 -->
 
-# [2.53.0] - Unreleased
+# [2.54.0] - Unreleased
 
 ## Highlights
 
@@ -63,14 +63,12 @@
 ## I/Os
 
 * Support for X source added (Java/Python) ([#X](https://github.com/apache/beam/issues/X)).
-* TextIO now supports skipping multiple header lines (Java) ([#17990](https://github.com/apache/beam/issues/17990)).
-* Python GCSIO is now implemented with GCP GCS Client instead of apitools ([#25676](https://github.com/apache/beam/issues/25676))
-* Adding support for LowCardinality DataType in ClickHouse (Java) ([#29533](https://github.com/apache/beam/pull/29533)).
+* Adding support for Tuples DataType in ClickHouse (Java) ([#29715](https://github.com/apache/beam/pull/29715)).
+
 
 ## New Features / Improvements
 
 * X feature added (Java/Python) ([#X](https://github.com/apache/beam/issues/X)).
-* The Python SDK now type checks `collections.abc.Collections` types properly. Some type hints that were erroneously allowed by the SDK may now fail. ([#29272](https://github.com/apache/beam/pull/29272))
 
 ## Breaking Changes
 
@@ -78,7 +76,7 @@
 
 ## Deprecations
 
-* Euphoria DSL is deprecated and will be removed in a future release (not before 2.56.0) ([#29451](https://github.com/apache/beam/issues/29451))
+* X behavior is deprecated and will be removed in X versions ([#X](https://github.com/apache/beam/issues/X)).
 
 ## Bugfixes
 
@@ -91,6 +89,43 @@
 
 * ([#X](https://github.com/apache/beam/issues/X)).
 
+# [2.53.0] - 2024-01-04
+
+## Highlights
+
+* Python streaming users that use 2.47.0 and newer versions of Beam should update to version 2.53.0, which fixes a known issue: ([#27330](https://github.com/apache/beam/issues/27330)).
+
+## I/Os
+
+* TextIO now supports skipping multiple header lines (Java) ([#17990](https://github.com/apache/beam/issues/17990)).
+* Python GCSIO is now implemented with GCP GCS Client instead of apitools ([#25676](https://github.com/apache/beam/issues/25676))
+* Added support for handling bad records to KafkaIO (Java) ([#29546](https://github.com/apache/beam/pull/29546))
+* Add support for generating text embeddings in MLTransform for Vertex AI and Hugging Face Hub models. ([#29564](https://github.com/apache/beam/pull/29564))
+* NATS IO connector added (Go) ([#29000](https://github.com/apache/beam/issues/29000)).
+* Adding support for LowCardinality DataType in ClickHouse (Java) ([#29533](https://github.com/apache/beam/pull/29533)).
+
+## New Features / Improvements
+
+* The Python SDK now type checks `collections.abc.Collections` types properly. Some type hints that were erroneously allowed by the SDK may now fail. ([#29272](https://github.com/apache/beam/pull/29272))
+* Running multi-language pipelines locally no longer requires Docker.
+  Instead, the same (generally auto-started) subprocess used to perform the
+  expansion can also be used as the cross-language worker.
+* Framework for adding Error Handlers to composite transforms added in Java ([#29164](https://github.com/apache/beam/pull/29164)).
+* Python 3.11 images now include google-cloud-profiler ([#29651](https://github.com/apache/beam/pull/29651)).
+
+## Deprecations
+
+* Euphoria DSL is deprecated and will be removed in a future release (not before 2.56.0) ([#29451](https://github.com/apache/beam/issues/29451))
+
+## Bugfixes
+
+* (Python) Fixed sporadic crashes in streaming pipelines that affected some users of 2.47.0 and newer SDKs ([#27330](https://github.com/apache/beam/issues/27330)).
+* (Python) Fixed a bug that caused MLTransform to drop identical elements in the output PCollection ([#29600](https://github.com/apache/beam/issues/29600)).
+
+## Security Fixes
+
+* Upgraded to go 1.21.5 to build, fixing [CVE-2023-45285](https://security-tracker.debian.org/tracker/CVE-2023-45285) and [CVE-2023-39326](https://security-tracker.debian.org/tracker/CVE-2023-39326)
+
 # [2.52.0] - 2023-11-17
 
 ## Highlights
 
@@ -107,7 +142,7 @@ should handle this. ([#25252](https://github.com/apache/beam/issues/25252)).
 * Add `UseDataStreamForBatch` pipeline option to the Flink runner. When it is set to true, Flink runner will run batch jobs using the DataStream API. By default the option is set to false, so the batch jobs are still executed using the DataSet API.
-* `upload_graph` as one of the Experiments options for DataflowRunner is no longer required when the graph is larger than 10MB for Java SDK ([PR#28621](https://github.com/apache/beam/pull/28621).
+* `upload_graph` as one of the Experiments options for DataflowRunner is no longer required when the graph is larger than 10MB for Java SDK ([PR#28621](https://github.com/apache/beam/pull/28621)).
 * state and side input cache has been enabled to a default of 100 MB. Use `--max_cache_memory_usage_mb=X` to provide cache size for the user state API and side inputs. (Python) ([#28770](https://github.com/apache/beam/issues/28770)).
 * Beam YAML stable release. Beam pipelines can now be written using YAML and leverage the Beam YAML framework which includes a preliminary set of IO's and turnkey transforms. More information can be found in the YAML root folder and in the [README](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/yaml/README.md).
@@ -131,6 +166,10 @@ as a workaround, a copy of "old" `CountingSource` class should be placed into a
 * Fixed [CVE-2023-39325](https://www.cve.org/CVERecord?id=CVE-2023-39325) (Java/Python/Go) ([#29118](https://github.com/apache/beam/issues/29118)).
 * Mitigated [CVE-2023-47248](https://nvd.nist.gov/vuln/detail/CVE-2023-47248) (Python) [#29392](https://github.com/apache/beam/issues/29392).
+
+## Known issues
+
+* MLTransform drops identical elements in the output PCollection. For any duplicate elements, a single element will be emitted downstream. ([#29600](https://github.com/apache/beam/issues/29600)).
+
 # [2.51.0] - 2023-10-03
 
 ## New Features / Improvements
 
@@ -165,6 +204,8 @@ as a workaround, a copy of "old" `CountingSource` class should be placed into a
 * Long-running Python pipelines might experience a memory leak: [#28246](https://github.com/apache/beam/issues/28246).
 * Python pipelines using BigQuery Storage Read API might need to pin `fastavro` dependency to 1.8.3 or earlier on some runners that don't use Beam Docker containers: [#28811](https://github.com/apache/beam/issues/28811)
+* MLTransform drops identical elements in the output PCollection. For any duplicate elements, a single element will be emitted downstream. ([#29600](https://github.com/apache/beam/issues/29600)).
+
 
 # [2.50.0] - 2023-08-30
 
@@ -226,6 +267,7 @@ as a workaround, a copy of "old" `CountingSource` class should be placed into a
 * Beam Python containers rely on a version of Debian/aom that has several security vulnerabilities: [CVE-2021-30474](https://nvd.nist.gov/vuln/detail/CVE-2021-30474), [CVE-2021-30475](https://nvd.nist.gov/vuln/detail/CVE-2021-30475), [CVE-2021-30473](https://nvd.nist.gov/vuln/detail/CVE-2021-30473), [CVE-2020-36133](https://nvd.nist.gov/vuln/detail/CVE-2020-36133), [CVE-2020-36131](https://nvd.nist.gov/vuln/detail/CVE-2020-36131), [CVE-2020-36130](https://nvd.nist.gov/vuln/detail/CVE-2020-36130), and [CVE-2020-36135](https://nvd.nist.gov/vuln/detail/CVE-2020-36135)
 * Python SDK's cross-language Bigtable sink mishandles records that don't have an explicit timestamp set: [#28632](https://github.com/apache/beam/issues/28632). To avoid this issue, set explicit timestamps for all records before writing to Bigtable.
 * Python SDK worker start-up logs, particularly PIP dependency installations, that are not logged at warning or higher are suppressed. This suppression is reverted in 2.51.0.
+* MLTransform drops identical elements in the output PCollection. For any duplicate elements, a single element will be emitted downstream. ([#29600](https://github.com/apache/beam/issues/29600)).
 
 # [2.49.0] - 2023-07-17
 
diff --git a/build.gradle.kts b/build.gradle.kts
index 59161809f37c..d4bd0fca4fc1 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -205,6 +205,9 @@ tasks.rat {
 
     // Ignore poetry autogenerated files.
     "**/poetry.lock",
+
+    // DuetAI training prompts
+    "learning/prompts/**/*.md",
   )
 
   // Add .gitignore excludes to the Apache Rat exclusion list.
We re-create the behavior @@ -232,14 +235,14 @@ tasks.register("javaPreCommit") { dependsOn(":model:pipeline:build") dependsOn(":model:job-management:build") dependsOn(":model:fn-execution:build") - dependsOn(":runners:google-cloud-dataflow-java:worker:build") dependsOn(":sdks:java:core:buildNeeded") // Inline :sdks:java:core:buildDependents so we can carve out pieces at a time dependsOn(":beam-validate-runner:build") dependsOn(":examples:java:build") + dependsOn(":examples:java:preCommit") dependsOn(":examples:java:twitter:build") - dependsOn(":examples:kotlin:build") + dependsOn(":examples:java:twitter:preCommit") dependsOn(":examples:multi-language:build") dependsOn(":model:fn-execution:build") dependsOn(":model:job-management:build") @@ -247,6 +250,7 @@ tasks.register("javaPreCommit") { dependsOn(":runners:core-construction-java:build") dependsOn(":runners:core-java:build") dependsOn(":runners:direct-java:build") + dependsOn(":runners:direct-java:needsRunnerTests") dependsOn(":runners:extensions-java:metrics:build") // lowest supported flink version var flinkVersions = project.ext.get("allFlinkVersions") as Array<*> @@ -268,6 +272,7 @@ tasks.register("javaPreCommit") { dependsOn(":runners:spark:3:job-server:build") dependsOn(":runners:twister2:build") dependsOn(":sdks:java:build-tools:build") + dependsOn(":sdks:java:container:java8:docker") dependsOn(":sdks:java:core:build") dependsOn(":sdks:java:core:jmh:build") dependsOn(":sdks:java:expansion-service:build") @@ -286,16 +291,6 @@ tasks.register("javaPreCommit") { dependsOn(":sdks:java:extensions:schemaio-expansion-service:build") dependsOn(":sdks:java:extensions:sketching:build") dependsOn(":sdks:java:extensions:sorter:build") - dependsOn(":sdks:java:extensions:sql:build") - dependsOn(":sdks:java:extensions:sql:datacatalog:build") - dependsOn(":sdks:java:extensions:sql:expansion-service:build") - dependsOn(":sdks:java:extensions:sql:hcatalog:build") - dependsOn(":sdks:java:extensions:sql:jdbc:build") - dependsOn(":sdks:java:extensions:sql:perf-tests:build") - dependsOn(":sdks:java:extensions:sql:shell:build") - dependsOn(":sdks:java:extensions:sql:udf-test-provider:build") - dependsOn(":sdks:java:extensions:sql:udf:build") - dependsOn(":sdks:java:extensions:sql:zetasql:build") dependsOn(":sdks:java:extensions:timeseries:build") dependsOn(":sdks:java:extensions:zetasketch:build") dependsOn(":sdks:java:fn-execution:build") @@ -309,6 +304,7 @@ tasks.register("javaPreCommit") { dependsOn(":sdks:java:io:sparkreceiver:2:build") dependsOn(":sdks:java:io:synthetic:build") dependsOn(":sdks:java:io:xml:build") + dependsOn(":sdks:java:javadoc:allJavadoc") dependsOn(":sdks:java:testing:expansion-service:build") dependsOn(":sdks:java:testing:jpms-tests:build") dependsOn(":sdks:java:testing:load-tests:build") @@ -318,13 +314,6 @@ tasks.register("javaPreCommit") { dependsOn(":sdks:java:testing:watermarks:build") dependsOn(":sdks:java:transform-service:build") dependsOn(":sdks:java:transform-service:launcher:build") - - dependsOn(":examples:java:preCommit") - dependsOn(":examples:java:twitter:preCommit") - dependsOn(":sdks:java:extensions:sql:jdbc:preCommit") - dependsOn(":sdks:java:javadoc:allJavadoc") - dependsOn(":runners:direct-java:needsRunnerTests") - dependsOn(":sdks:java:container:java8:docker") } // a precommit task build multiple IOs (except those splitting into single jobs) @@ -350,6 +339,7 @@ tasks.register("javaioPreCommit") { dependsOn(":sdks:java:io:jdbc:build") dependsOn(":sdks:java:io:jms:build") 
dependsOn(":sdks:java:io:kafka:build") + dependsOn(":sdks:java:io:kafka:upgrade:build") dependsOn(":sdks:java:io:kudu:build") dependsOn(":sdks:java:io:mongodb:build") dependsOn(":sdks:java:io:mqtt:build") @@ -376,10 +366,18 @@ tasks.register("flinkPreCommit") { } tasks.register("sqlPreCommit") { - dependsOn(":sdks:java:extensions:sql:runBasicExample") - dependsOn(":sdks:java:extensions:sql:runPojoExample") - dependsOn(":sdks:java:extensions:sql:build") + dependsOn(":sdks:java:extensions:sql:preCommit") dependsOn(":sdks:java:extensions:sql:buildDependents") + dependsOn(":sdks:java:extensions:sql:datacatalog:build") + dependsOn(":sdks:java:extensions:sql:expansion-service:build") + dependsOn(":sdks:java:extensions:sql:hcatalog:build") + dependsOn(":sdks:java:extensions:sql:jdbc:build") + dependsOn(":sdks:java:extensions:sql:jdbc:preCommit") + dependsOn(":sdks:java:extensions:sql:perf-tests:build") + dependsOn(":sdks:java:extensions:sql:shell:build") + dependsOn(":sdks:java:extensions:sql:udf-test-provider:build") + dependsOn(":sdks:java:extensions:sql:udf:build") + dependsOn(":sdks:java:extensions:sql:zetasql:build") } tasks.register("javaPreCommitPortabilityApi") { @@ -724,7 +722,6 @@ if (project.hasProperty("testJavaVersion")) { tasks.getByName("javaPreCommitPortabilityApi").dependsOn(":sdks:java:testing:test-utils:verifyJavaVersion$testVer") tasks.getByName("javaExamplesDataflowPrecommit").dependsOn(":sdks:java:testing:test-utils:verifyJavaVersion$testVer") - tasks.getByName("sqlPreCommit").dependsOn(":sdks:java:testing:test-utils:verifyJavaVersion$testVer") } else { allprojects { tasks.withType(Test::class).configureEach { diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy index 359aeea55a2f..9576dd2c2f44 100644 --- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy +++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy @@ -598,16 +598,16 @@ class BeamModulePlugin implements Plugin { def dbcp2_version = "2.9.0" def errorprone_version = "2.10.0" // Try to keep gax_version consistent with gax-grpc version in google_cloud_platform_libraries_bom - def gax_version = "2.36.0" + def gax_version = "2.38.0" def google_ads_version = "26.0.0" def google_clients_version = "2.0.0" def google_cloud_bigdataoss_version = "2.2.16" // Try to keep google_cloud_spanner_version consistent with google_cloud_spanner_bom in google_cloud_platform_libraries_bom - def google_cloud_spanner_version = "6.52.1" + def google_cloud_spanner_version = "6.55.0" def google_code_gson_version = "2.10.1" def google_oauth_clients_version = "1.34.1" // Try to keep grpc_version consistent with gRPC version in google_cloud_platform_libraries_bom - def grpc_version = "1.58.0" + def grpc_version = "1.59.1" def guava_version = "32.1.2-jre" def hadoop_version = "2.10.2" def hamcrest_version = "2.1" @@ -626,7 +626,7 @@ class BeamModulePlugin implements Plugin { def postgres_version = "42.2.16" def powermock_version = "2.0.9" // Try to keep protobuf_version consistent with the protobuf version in google_cloud_platform_libraries_bom - def protobuf_version = "3.24.4" + def protobuf_version = "3.25.1" def qpid_jms_client_version = "0.61.0" def quickcheck_version = "1.0" def sbe_tool_version = "1.25.1" @@ -731,10 +731,10 @@ class BeamModulePlugin implements Plugin { // Keep version consistent with the version in google_cloud_resourcemanager, managed by google_cloud_platform_libraries_bom 
google_api_services_cloudresourcemanager : "com.google.apis:google-api-services-cloudresourcemanager:v1-rev20230806-$google_clients_version", google_api_services_dataflow : "com.google.apis:google-api-services-dataflow:v1b3-rev20220920-$google_clients_version", - google_api_services_healthcare : "com.google.apis:google-api-services-healthcare:v1-rev20231003-$google_clients_version", + google_api_services_healthcare : "com.google.apis:google-api-services-healthcare:v1-rev20231101-$google_clients_version", google_api_services_pubsub : "com.google.apis:google-api-services-pubsub:v1-rev20220904-$google_clients_version", // Keep version consistent with the version in google_cloud_nio, managed by google_cloud_platform_libraries_bom - google_api_services_storage : "com.google.apis:google-api-services-storage:v1-rev20230907-$google_clients_version", + google_api_services_storage : "com.google.apis:google-api-services-storage:v1-rev20231202-$google_clients_version", google_auth_library_credentials : "com.google.auth:google-auth-library-credentials", // google_cloud_platform_libraries_bom sets version google_auth_library_oauth2_http : "com.google.auth:google-auth-library-oauth2-http", // google_cloud_platform_libraries_bom sets version google_cloud_bigquery : "com.google.cloud:google-cloud-bigquery", // google_cloud_platform_libraries_bom sets version @@ -754,7 +754,7 @@ class BeamModulePlugin implements Plugin { // The release notes shows the versions set by the BOM: // https://github.com/googleapis/java-cloud-bom/releases/tag/v26.21.0 // Update libraries-bom version on sdks/java/container/license_scripts/dep_urls_java.yaml - google_cloud_platform_libraries_bom : "com.google.cloud:libraries-bom:26.26.0", + google_cloud_platform_libraries_bom : "com.google.cloud:libraries-bom:26.28.0", google_cloud_spanner : "com.google.cloud:google-cloud-spanner", // google_cloud_platform_libraries_bom sets version google_cloud_spanner_test : "com.google.cloud:google-cloud-spanner:$google_cloud_spanner_version:tests", google_code_gson : "com.google.code.gson:gson:$google_code_gson_version", @@ -2205,7 +2205,7 @@ class BeamModulePlugin implements Plugin { def goRootDir = "${project.rootDir}/sdks/go" // This sets the whole project Go version. - project.ext.goVersion = "go1.21.4" + project.ext.goVersion = "go1.21.5" // Minor TODO: Figure out if we can pull out the GOCMD env variable after goPrepare script // completion, and avoid this GOBIN substitution. diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/GrpcVendoring_1_54_0.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/GrpcVendoring_1_54_0.groovy index ddb317b89294..4b8a059109c9 100644 --- a/buildSrc/src/main/groovy/org/apache/beam/gradle/GrpcVendoring_1_54_0.groovy +++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/GrpcVendoring_1_54_0.groovy @@ -59,6 +59,7 @@ class GrpcVendoring_1_54_0 { "io.grpc:grpc-netty:$grpc_version", "io.grpc:grpc-protobuf:$grpc_version", "io.grpc:grpc-stub:$grpc_version", + "io.grpc:grpc-alts:$grpc_version", "io.grpc:grpc-testing:$grpc_version", // Use a classifier to ensure we get the jar containing native libraries. In the future // hopefully netty releases a single jar containing native libraries for all architectures. 
diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/kafka/KafkaTestUtilities.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/kafka/KafkaTestUtilities.groovy index cd2875fdb512..bb08e79edd3c 100644 --- a/buildSrc/src/main/groovy/org/apache/beam/gradle/kafka/KafkaTestUtilities.groovy +++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/kafka/KafkaTestUtilities.groovy @@ -40,7 +40,7 @@ class KafkaTestUtilities { '"keySizeBytes": "10",' + '"valueSizeBytes": "90"' + '}', - "--readTimeout=120", + "--readTimeout=60", "--kafkaTopic=beam", "--withTestcontainers=true", "--kafkaContainerVersion=5.5.2", @@ -56,6 +56,7 @@ class KafkaTestUtilities { excludeTestsMatching "*SDFResumesCorrectly" //Kafka SDF does not work for kafka versions <2.0.1 excludeTestsMatching "*StopReadingFunction" //Kafka SDF does not work for kafka versions <2.0.1 excludeTestsMatching "*WatermarkUpdateWithSparseMessages" //Kafka SDF does not work for kafka versions <2.0.1 + excludeTestsMatching "*KafkaIOSDFReadWithErrorHandler" } } } diff --git a/contributor-docs/rc-testing-guide.md b/contributor-docs/rc-testing-guide.md new file mode 100644 index 000000000000..0ac932711206 --- /dev/null +++ b/contributor-docs/rc-testing-guide.md @@ -0,0 +1,60 @@ + + +# Release Candidate (RC) Testing Guide + +This guide is meant for anybody who is interested in testing Beam Release Candidates against downstream projects. Note +that one need not have any status on the Apache Beam project (e.g., PMC Member, Committer) to vote; all are welcome. +Please subscribe to the [dev list](https://lists.apache.org/list.html?dev@beam.apache.org), and vote on the RC Vote email thread. + + +## RC Testing Objectives + +The RC testing process aims to: + + - Test new release candidates against existing code bases utilizing Apache Beam, to ensure there are no unexpected behaviors downstream. + - Incorporate a breadth of perspectives (including validation on multiple SDKs and multiple runners), before releasing a new version. + - Allow Beam Contributors to dogfood their changes and verify that they work as intended. + + +## Beam Release process overview +- For a comprehensive overview on the Beam release process, please take a look at our [release guide](https://github.com/apache/beam/blob/master/contributor-docs/release-guide.md). +- Note that release candidate votes will be open for 72 hours after the voting email is sent. + + +## Ideas for Python SDK Validators + +_Note: Do the following in a dev-like environment._ +- If you are a Python SDK user that utilizes notebooks (e.g., Jupyter Notebooks or Colab Notebooks), change `pip install` +to point to the new RC (e.g. `pip install apache_beam[gcp]==2.52.0rc1`). Re-execute the workflow to ensure everything +works as intended. +- If your workflow utilizes [Dataflow Templates](https://github.com/GoogleCloudPlatform/DataflowTemplates), or another way of launching your job, modify your `requirements.txt` file, `setup.py` file, or `Dockerfile` to point to the new Beam RC. +- _Tip_: Run your pipeline against both the Direct Runner and another runner of your choice by modifying your job's `PipelineOptions`. + + +## Ideas for Java SDK Validators +_Note: Do the following in a dev-like environment._ +- If you are a Java SDK user that utilizes Maven or Gradle in your workflow, in your `pom.xml` file, modify the `beam.version` to reflect the newest RC, and modify the `<url>` tag under `<repository>`, to point to the repository given in the vote email.
[Example Changes](https://github.com/GoogleCloudPlatform/DataflowTemplates/pull/1090/files). + + +## Ideas for Go SDK Validators +_Note: Do the following in a dev-like environment._ +- If you utilize the Go SDK, use `go get` to fetch the desired RC, for example, `go get -d github.com/apache/beam/sdks/v2@v2.xx.0-RC1` +- Utilize the `--environment_config` flag to point to the new release, for example, `--environment_config=apache/beam_go_sdk:2.xx.0rc1` + + +## After validation + +- Reply to [dev list](https://lists.apache.org/list.html?dev@beam.apache.org) vote thread with your [vote](https://www.apache.org/foundation/voting.html) and an explanation of the use case you tested. +- [Optional]: If your use case can be well represented by a test, consider contributing a test to Beam! However, note that much of the value of validation is manual testing outside of the Beam CI workflow, so it is still recommended (and highly encouraged!) to validate these test cases in future releases. \ No newline at end of file diff --git a/contributor-docs/release-guide.md b/contributor-docs/release-guide.md index faa8ad5927cf..b3b3213d9c97 100644 --- a/contributor-docs/release-guide.md +++ b/contributor-docs/release-guide.md @@ -271,12 +271,6 @@ fixes. This is because bugs are always being introduced as part of normal development. We cut the branch to prevent new bugs being introduced and _then_ we fix and cherrypick any truly release-blocking problems. -In order to run this workflow, you will need to provide a Apache ID and Jenkins -API token. Your Jenkins API token can be generated by visiting -https://ci-beam.apache.org, signing in with your Apache credentials, then going -to `https://ci-beam.apache.org/user//configure` and clicking -`Add new token` in the API token section. - - [ ] Run [cut_release_branch](https://github.com/apache/beam/actions/workflows/cut_release_branch.yml) (click `run workflow`) @@ -288,18 +282,14 @@ diagram: This should be accomplished by the [cut_release_branch](https://github.com/apache/beam/actions/workflows/cut_release_branch.yml) -workflow. This workflow will also update -[mass_comment.py](https://github.com/apache/beam/blob/master/release/src/main/scripts/mass_comment.py) -to contain all of the active Jenkins jobs. +workflow. The following must be manually done or confirmed: - [ ] The `master` branch has the SNAPSHOT/dev version incremented. - [ ] The release branch has the SNAPSHOT/dev version to be released. - [ ] The Dataflow container image should be modified to the version to be released. -- [ ] Due to a bug/limitation in the workflow, you must navigate to the pull - request found in the logs and comment `Run Gradle Publish`. -- [ ] After publish, close the PR. +- [ ] Due to a current limitation in the workflow, you must navigate to https://github.com/apache/beam/actions/workflows/beam_Release_NightlySnapshot.yml, click "Run workflow", and select the branch just created (release-2.xx.0) to build a snapshot. - [ ] Manually update `CHANGES.md` on `master` by adding a new section for the next release ([example](https://github.com/apache/beam/commit/96ab1fb3fe07acf7f7dc9d8c829ae36890d1535c)). @@ -357,13 +347,9 @@ issues that would block the creation of the release candidate. ``` (cd release/src/main/scripts && ./verify_release_build.sh) ``` - 4. Trigger all Github Action and Jenkins PostCommit jobs from the PR created by the previous step. + 4. Trigger all Github Action jobs from the PR created by the previous step.
GitHub Action jobs are triggered automatically by the `pull_request_target` event on a specific placeholder file added to the PR (`release/trigger_all_tests.json`), so no additional action should be needed. - You can run [mass_comment.py](https://github.com/apache/beam/blob/master/release/src/main/scripts/mass_comment.py) to do that. - Or manually add one trigger phrase per PR comment for Jenkins tests, or rerun the workflow for GitHub Action tests. - See [jenkins_jobs.txt](https://github.com/apache/beam/blob/master/release/src/main/scripts/jenkins_jobs.txt) - for a full list of phrases. - **Tasks included in the script** - Installs `hub` with your agreement and setup local git repo; @@ -817,7 +803,7 @@ template; please adjust as you see fit. Reviewers are encouraged to test their own use cases with the release candidate, and vote +1 if no issues are found. Only PMC member votes will count towards the final vote, but votes from all community members is encouraged and helpful for finding regressions; you can either test your own - use cases or use cases from the validation sheet [10]. + use cases [13] or use cases from the validation sheet [10]. The complete staging area is available for your review, which includes: * GitHub Release notes [1], @@ -833,7 +819,7 @@ template; please adjust as you see fit. The vote will be open for at least 72 hours. It is adopted by majority approval, with at least 3 PMC affirmative votes. - For guidelines on how to try the release in your projects, check out our blog post at https://beam.apache.org/blog/validate-beam-release/. + For guidelines on how to try the release in your projects, check out our RC testing guide [13]. Thanks, Release Manager @@ -850,6 +836,7 @@ template; please adjust as you see fit. [10] https://docs.google.com/spreadsheets/d/1qk-N5vjXvbcEk68GjbkSZTR8AGqyNUM-oLFo_ZXBpJw/edit#gid=... [11] https://hub.docker.com/search?q=apache%2Fbeam&type=image [12] https://github.com/apache/beam/pull/... + [13] https://github.com/apache/beam/blob/master/contributor-docs/rc-testing-guide.md If there are any issues found in the release candidate, reply on the vote thread to cancel the vote. There’s no need to wait 72 hours. Go back to @@ -860,7 +847,8 @@ pull request, just correct it on the spot and the vote can continue as-is. ### Run validation tests The community is responsible for performing validation, but as release manager -you are expected to contribute as well. +you are expected to contribute as well. Please see the [RC Testing Guide](https://github.com/apache/beam/blob/master/contributor-docs/rc-testing-guide.md) +for ideas on validating the release candidate against downstream projects. Before accepting an RC, as a community we try to exercise most (if not all) of the tests listed in this @@ -1229,13 +1217,16 @@ Ping [dev@](mailto:dev@beam.apache.org) mailing list for assistance if you need Copy the source release from the `dev` repository to the `release` repository at `dist.apache.org` using Subversion. ``` +export RELEASE_VERSION= +export OLD_RELEASE_VERSION= + svn co https://dist.apache.org/repos/dist/dev/beam dev # Checkout the `dev` artifact repo. svn co https://dist.apache.org/repos/dist/release/beam release # Checkout the `release` artifact repo. mkdir release/$RELEASE_VERSION -# Copy files from the `dev` artifact repo to the `release` artifact repo.
+cp -R dev/$RELEASE_VERSION/* release/$RELEASE_VERSION/ cd release @@ -1338,6 +1329,18 @@ Release Manager [1] https://github.com/apache/beam/pull/123 ``` +### Update the Java starter repo + +After the new Beam release is published, the Java starter project needs to have its version manually upgraded. +To do this, create a PR like https://github.com/apache/beam-starter-java/pull/94 (with the appropriate version +number). + +#### (Optional) Update the remaining starter repos + +You can also update the versions in https://github.com/apache/beam-starter-python and +https://github.com/apache/beam-starter-go if you would like. This is optional because dependabot will automatically +open a PR to do this if you don't. + ### Update Beam Playground After new Beam Release is published, Beam Playground can be updated following the steps below: diff --git a/examples/java/src/main/java/org/apache/beam/examples/KafkaStreaming.java b/examples/java/src/main/java/org/apache/beam/examples/KafkaStreaming.java index 34a4b646555d..5bad7911e646 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/KafkaStreaming.java +++ b/examples/java/src/main/java/org/apache/beam/examples/KafkaStreaming.java @@ -49,8 +49,11 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.transforms.Combine; import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.Sum; +import org.apache.beam.sdk.transforms.errorhandling.BadRecord; +import org.apache.beam.sdk.transforms.errorhandling.ErrorHandler.BadRecordErrorHandler; import org.apache.beam.sdk.transforms.windowing.AfterProcessingTime; import org.apache.beam.sdk.transforms.windowing.FixedWindows; import org.apache.beam.sdk.transforms.windowing.IntervalWindow; @@ -60,6 +63,8 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; +import org.apache.kafka.common.errors.SerializationException; +import org.apache.kafka.common.serialization.Deserializer; import org.apache.kafka.common.serialization.IntegerDeserializer; import org.apache.kafka.common.serialization.IntegerSerializer; import org.apache.kafka.common.serialization.StringDeserializer; @@ -208,15 +213,22 @@ public void run() { // Start reading from Kafka with the latest offset consumerConfig.put("auto.offset.reset", "latest"); - PCollection<KV<String, Integer>> pCollection = - pipeline.apply( - KafkaIO.<String, Integer>read() - .withBootstrapServers(options.getKafkaHost()) - .withTopic(TOPIC_NAME) - .withKeyDeserializer(StringDeserializer.class) - .withValueDeserializer(IntegerDeserializer.class) - .withConsumerConfigUpdates(consumerConfig) - .withoutMetadata()); + // Register an error handler for any deserialization errors. + // Errors are simulated with an intentionally failing deserializer. + PCollection<KV<String, Integer>> pCollection; + try (BadRecordErrorHandler<PCollection<BadRecord>> errorHandler = + pipeline.registerBadRecordErrorHandler(new LogErrors())) { + pCollection = + pipeline.apply( + KafkaIO.<String, Integer>read() + .withBootstrapServers(options.getKafkaHost()) + .withTopic(TOPIC_NAME) + .withKeyDeserializer(StringDeserializer.class) + .withValueDeserializer(IntermittentlyFailingIntegerDeserializer.class) + .withConsumerConfigUpdates(consumerConfig) + .withBadRecordErrorHandler(errorHandler) + .withoutMetadata()); + } pCollection // Apply a window and a trigger to output repeatedly.
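The hunk above routes Kafka deserialization failures to a `BadRecordErrorHandler` instead of failing the pipeline. Purely for comparison (not part of this change), the same dead-letter idea can be sketched in the Python SDK with a multi-output `ParDo`; the `ParseIntFn` class and the tag names below are illustrative, not APIs touched by this diff:

```python
# A minimal dead-letter sketch in the Python SDK (illustrative only):
# elements that fail to parse are routed to a 'bad' output instead of
# crashing the pipeline, mirroring the bad-record handling added above.
import apache_beam as beam
from apache_beam import pvalue


class ParseIntFn(beam.DoFn):
    def process(self, element):
        try:
            yield int(element)
        except ValueError:
            # Route the unparseable record to the dead-letter output.
            yield pvalue.TaggedOutput('bad', element)


with beam.Pipeline() as pipeline:
    results = (
        pipeline
        | beam.Create(['1', '2', 'oops', '4'])
        | beam.ParDo(ParseIntFn()).with_outputs('bad', main='good'))
    results.good | 'LogGood' >> beam.Map(print)
    results.bad | 'LogBad' >> beam.Map(lambda rec: print(f'bad record: {rec}'))
```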
@@ -317,4 +329,39 @@ public void processElement(ProcessContext c, IntervalWindow w) throws Exception c.output(c.element()); } } + + // Simple PTransform to log error information + static class LogErrors extends PTransform<PCollection<BadRecord>, PCollection<BadRecord>> { + + @Override + public PCollection<BadRecord> expand(PCollection<BadRecord> input) { + return input.apply("Log Errors", ParDo.of(new LogErrorFn())); + } + + static class LogErrorFn extends DoFn<BadRecord, BadRecord> { + @ProcessElement + public void processElement(@Element BadRecord record, OutputReceiver<BadRecord> receiver) { + System.out.println(record); + receiver.output(record); + } + } + } + + // Intentionally failing deserializer to simulate bad data from Kafka + public static class IntermittentlyFailingIntegerDeserializer implements Deserializer<Integer> { + + public static final IntegerDeserializer INTEGER_DESERIALIZER = new IntegerDeserializer(); + public int deserializeCount = 0; + + public IntermittentlyFailingIntegerDeserializer() {} + + @Override + public Integer deserialize(String topic, byte[] data) { + deserializeCount++; + if (deserializeCount % 10 == 0) { + throw new SerializationException("Expected Serialization Exception"); + } + return INTEGER_DESERIALIZER.deserialize(topic, data); + } + } } diff --git a/examples/notebooks/beam-ml/README.md b/examples/notebooks/beam-ml/README.md index 0ae937e9e284..f1c19747fc71 100644 --- a/examples/notebooks/beam-ml/README.md +++ b/examples/notebooks/beam-ml/README.md @@ -16,7 +16,7 @@ specific language governing permissions and limitations under the License. --> -# ML Sample Notebooks +# ML sample notebooks Starting with the Apache Beam SDK version 2.40, users have access to a [RunInference](https://beam.apache.org/releases/pydoc/current/apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RunInference) @@ -25,15 +25,15 @@ transform. This transform allows you to make predictions and inference on data with machine learning (ML) models. The model handler abstracts the user from the configuration needed for specific frameworks, such as Tensorflow, PyTorch, and others. For a full list of supported frameworks, -see the Apache Beam [Machine Learning](https://beam.apache.org/documentation/sdks/python-machine-learning) page. +see the [About Beam ML](https://beam.apache.org/documentation/ml/about-ml/) page. -## Using The Notebooks +## Use the notebooks These notebooks illustrate ways to use Apache Beam's RunInference transforms, as well as different -use cases for [ModelHandler](https://beam.apache.org/releases/pydoc/current/apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler) implementations. -Beam comes with [multiple ModelHandler implementations](https://beam.apache.org/documentation/sdks/python-machine-learning/#modify-a-pipeline-to-use-an-ml-model). +use cases for [`ModelHandler`](https://beam.apache.org/releases/pydoc/current/apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.ModelHandler) implementations. +Beam comes with [multiple `ModelHandler` implementations](https://beam.apache.org/documentation/ml/about-ml/#modify-a-python-pipeline-to-use-an-ml-model). -### Loading the Notebooks +### Load the notebooks 1. To get started quickly with notebooks, use [Colab](https://colab.sandbox.google.com/). 2. In Colab, open the notebook from GitHub using the notebook URL, for example: @@ -48,6 +48,14 @@ to your project and bucket. This section contains the following example notebooks.
+### Data processing + +* [Generate text embeddings by using the Vertex AI API](https://github.com/apache/beam/blob/master/examples/notebooks/beam-ml/data_preprocessing/vertex_ai_text_embeddings.ipynb) +* [Generate text embeddings by using Hugging Face Hub models](https://github.com/apache/beam/blob/master/examples/notebooks/beam-ml/data_preprocessing/huggingface_text_embeddings.ipynb) +* [Compute and apply vocabulary on a dataset](https://github.com/apache/beam/blob/master/examples/notebooks/beam-ml/data_preprocessing/compute_and_apply_vocab.ipynb) +* [Use MLTransform to scale data](https://github.com/apache/beam/blob/master/examples/notebooks/beam-ml/data_preprocessing/scale_data.ipynb) +* [Preprocessing with the Apache Beam DataFrames API](https://github.com/apache/beam/blob/master/examples/notebooks/beam-ml/dataframe_api_preprocessing.ipynb) + ### Prediction and inference with pretrained models * [Apache Beam RunInference for PyTorch](https://github.com/apache/beam/blob/master/examples/notebooks/beam-ml/run_inference_pytorch.ipynb) @@ -85,8 +93,3 @@ This section contains the following example notebooks. ### Model Evaluation * [Use TFMA to evaluate and compare model performance](https://github.com/apache/beam/blob/master/examples/notebooks/beam-ml/tfma_beam.ipynb) - -### Data processing - -* [Preprocess data with MLTransform](https://github.com/apache/beam/blob/master/examples/notebooks/beam-ml/mltransform_basic.ipynb) -* [Preprocessing with the Apache Beam DataFrames API](https://github.com/apache/beam/blob/master/examples/notebooks/beam-ml/dataframe_api_preprocessing.ipynb) diff --git a/examples/notebooks/beam-ml/automatic_model_refresh.ipynb b/examples/notebooks/beam-ml/automatic_model_refresh.ipynb index cf05979c5b33..b0e9bc2f53e4 100644 --- a/examples/notebooks/beam-ml/automatic_model_refresh.ipynb +++ b/examples/notebooks/beam-ml/automatic_model_refresh.ipynb @@ -15,16 +15,6 @@ } }, "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ - "\"Open" - ] - }, { "cell_type": "code", "source": [ diff --git a/examples/notebooks/beam-ml/data_preprocessing/compute_and_apply_vocab.ipynb b/examples/notebooks/beam-ml/data_preprocessing/compute_and_apply_vocab.ipynb new file mode 100644 index 000000000000..ee47cb7711fa --- /dev/null +++ b/examples/notebooks/beam-ml/data_preprocessing/compute_and_apply_vocab.ipynb @@ -0,0 +1,419 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "code", + "source": [ + "# @title ###### Licensed to the Apache Software Foundation (ASF), Version 2.0 (the \"License\")\n", + "\n", + "# Licensed to the Apache Software Foundation (ASF) under one\n", + "# or more contributor license agreements. See the NOTICE file\n", + "# distributed with this work for additional information\n", + "# regarding copyright ownership. The ASF licenses this file\n", + "# to you under the Apache License, Version 2.0 (the\n", + "# \"License\"); you may not use this file except in compliance\n", + "# with the License. 
You may obtain a copy of the License at\n", + "#\n", + "# http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing,\n", + "# software distributed under the License is distributed on an\n", + "# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n", + "# KIND, either express or implied. See the License for the\n", + "# specific language governing permissions and limitations\n", + "# under the License" + ], + "metadata": { + "id": "n2PtbQM61lFZ" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "# Compute and apply vocabulary on a dataset\n", + "\n", + "\n", + " \n", + " \n", + "
\n", + " Run in Google Colab\n", + " \n", + " View source on GitHub\n", + "
\n" + ], + "metadata": { + "id": "ZUSiAR62SgO8" + } + }, + { + "cell_type": "markdown", + "source": [ + "The [`ComputeAndApplyVocabulary`](https://beam.apache.org/releases/pydoc/current/apache_beam.ml.transforms.tft.html#apache_beam.ml.transforms.tft.ComputeAndApplyVocabulary) data processing transform computes a unique vocabulary from a dataset and then maps each word or token to a distinct integer index. Use this transform to change textual data into numerical representations for machine learning (ML) tasks.\n", + "\n", + "When you train ML models that use text data, generating a vocabulary on the incoming dataset is an important preprocessing step. By mapping words to numerical indices, the vocabulary reduces the complexity and dimensionality of dataset. This step allows ML models to process the same words in a consistent way.\n", + "\n", + "This notebook shows how to use `MLTransform` to complete the following tasks:\n", + "* Use `write` mode to generate a vocabulary on the input text and assign an index value to each token.\n", + "* Use `read` mode to use the generated vocabulary and assign an index to a different dataset.\n", + "\n", + "`MLTransform` uses the `ComputeAndApplyVocabulary` transform, which is implemented by using `tensorflow_transform` to generate the vocabulary.\n", + "\n", + "For more information about using `MLTransform`, see [Preprocess data with MLTransform](https://beam.apache.org/documentation/ml/preprocess-data/) in the Apache Beam documentation" + ], + "metadata": { + "id": "Ebs9JtTrqQZH" + } + }, + { + "cell_type": "markdown", + "source": [ + "## Install the required modules\n", + "\n", + "To use `ComputeAndVocabulary` with `MLTransfrom`, install `tensorflow_transform` and the Apache Beam SDK version 2.53.0 or later." + ], + "metadata": { + "id": "BnOMWq9C1Yo0" + } + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "P3uUPotJo7Dc" + }, + "outputs": [], + "source": [ + "! pip install apache_beam>=2.53.0 --quiet\n", + "! pip install tensorflow-transform --quiet" + ] + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "import tempfile\n", + "import apache_beam as beam\n", + "from apache_beam.ml.transforms.base import MLTransform\n", + "from apache_beam.ml.transforms.tft import ComputeAndApplyVocabulary" + ], + "metadata": { + "id": "e7-Vmawtq3xC" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "## Use the artifact location\n", + "\n", + "In `write` mode, the artifact location is used to store artifacts, such as the vocabulary file generated by `ComputeAndApplyVocabulary`.\n", + "\n", + "**NOTE**: The artifact location must be empty. If it isn't empty, a `RuntimeError` occurs.\n", + "\n", + "In `read` mode, `MLTransform` fetches artifacts from the specified artifact location. Pass the same artifact location that you used in `write` mode. 
Otherwise, a `RuntimeError` occurs or `MLTransform` produces unexpected results in `read` mode.\n" + ], + "metadata": { + "id": "vfarBxAMFvRA" + } + }, + { + "cell_type": "code", + "source": [ + "artifact_location = tempfile.mkdtemp(prefix='compute_and_apply_vocab_')\n", + "artifact_location_with_frequency_threshold = tempfile.mkdtemp(prefix='compute_and_apply_vocab_frequency_threshold_')" + ], + "metadata": { + "id": "wkVNu3SJrYnc" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "documents = [\n", + " {\"feature\": \"the quick brown fox jumps over the lazy dog\"},\n", + " {\"feature\": \"the five boxing wizards jump quickly in the sky\"},\n", + " {\"feature\": \"dogs are running in the park\"},\n", + " {\"feature\": \"the quick brown fox\"}\n", + "]" + ], + "metadata": { + "id": "s3bYkvXlrnRt" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "In this example, in `write` mode, `MLTransform` uses `ComputeAndApplyVocabulary` to generate a vocabulary on the incoming dataset. The incoming text data is split into tokens. Each token is assigned a unique index.\n", + "\n", + "The generated vocabulary is stored in an artifact location that you can use on a different dataset in `read` mode." + ], + "metadata": { + "id": "oETBJNVfRws_" + } + }, + { + "cell_type": "code", + "source": [ + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = pipeline | \"CreateData\" >> beam.Create(documents)\n", + " # Compute and apply vocabulary by using MLTransform.\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(write_artifact_location=artifact_location).with_transform(\n", + " ComputeAndApplyVocabulary(columns=['feature'], split_string_by_delimiter=' ', vocab_filename='vocab_index'))\n", + " )\n", + " transformed_pcoll | \"Print\" >> beam.Map(print)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "6hAYN0ZYt2da", + "outputId": "2b843ab1-9b72-4f31-d9b9-58c2e0c0fe75" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Row(feature=array([ 0, 1, 4, 3, 12, 10, 0, 11, 16]))\n", + "Row(feature=array([ 0, 14, 17, 5, 13, 8, 2, 0, 6]))\n", + "Row(feature=array([15, 18, 7, 2, 0, 9]))\n", + "Row(feature=array([0, 1, 4, 3]))\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Understand and visualize vocabulary\n", + "\n", + "When working with text data in machine learning, one common step is the generation of a vocabulary index. `MLTransform` completes this step by using the `ComputeAndApplyVocabulary` transformation. Each unique word in your text data is assigned a specific index. This index is then used to represent the text in a numerical format, which is needed for machine learning algorithms.\n", + "\n", + "In this example, the `ComputeAndApplyVocabulary` transformation is applied to the `feature` column. A vocabulary index is created for each unique word found in this column.\n", + "\n", + "To visualize and understand this generated vocabulary, use the `ArtifactsFetcher` class. This class allows you to retrieve the vocabulary list from your specified location. When you have this list, you can see the index associated with each word in your vocabulary.
This index corresponds to the numerical representation used in the transformation output of `ComputeAndApplyVocabulary`.\n", + "\n", + "Examine this vocabulary index to understand how your text data is being processed and represented numerically. This understanding is useful for debugging and improving machine learning models that rely on text data." + ], + "metadata": { + "id": "hvTvzOw8iBi9" + } + }, + { + "cell_type": "code", + "source": [ + "from apache_beam.ml.transforms.utils import ArtifactsFetcher\n", + "artifact_fetcher = ArtifactsFetcher(artifact_location)\n", + "vocab_list = artifact_fetcher.get_vocab_list(vocab_filename='vocab_index_feature')\n", + "for i in range(len(vocab_list)):\n", + " print(f'{i}: {vocab_list[i]}')" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "dzmd_4GaiACA", + "outputId": "d73a5bee-15bc-4cb4-d2e0-59398a62cd60" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "0: the\n", + "1: quick\n", + "2: in\n", + "3: fox\n", + "4: brown\n", + "5: wizards\n", + "6: sky\n", + "7: running\n", + "8: quickly\n", + "9: park\n", + "10: over\n", + "11: lazy\n", + "12: jumps\n", + "13: jump\n", + "14: five\n", + "15: dogs\n", + "16: dog\n", + "17: boxing\n", + "18: are\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Set the frequency threshold\n", + "\n", + "The `frequency_threshold` parameter identifies the elements that appear frequently in the dataset. This parameter limits the generated vocabulary to elements with an absolute frequency greater than or equal to the specified threshold. If you don't specify the parameter, the entire vocabulary is generated.\n", + "\n", + "If the frequency of a vocabulary item is less than the threshold, it's assigned a default value. You can use the `default_value` parameter to set this value. Otherwise, it defaults to `-1`." + ], + "metadata": { + "id": "oZZtAVk4wJCd" + } + }, + { + "cell_type": "code", + "source": [ + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = pipeline | \"CreateData\" >> beam.Create(documents)\n", + " # Compute and apply vocabulary by using MLTransform.\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(write_artifact_location=artifact_location_with_frequency_threshold).with_transform(\n", + " ComputeAndApplyVocabulary(columns=['feature'], split_string_by_delimiter=' ', frequency_threshold=2, vocab_filename='vocab_index'))\n", + " )\n", + " transformed_pcoll | \"Print\" >> beam.Map(print)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "FpDkOSWtv1pn", + "outputId": "ff74a769-534c-4f40-fc59-bf4a10eaea7c" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Row(feature=array([ 0, 1, 4, 3, -1, -1, 0, -1, -1]))\n", + "Row(feature=array([ 0, -1, -1, -1, -1, -1, 2, 0, -1]))\n", + "Row(feature=array([-1, -1, -1, 2, 0, -1]))\n", + "Row(feature=array([0, 1, 4, 3]))\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "In the output, if the frequency of a token is less than the specified threshold, the token is assigned the `default_value` of `-1`. The remaining tokens are included in the generated vocabulary file."
+ ], + "metadata": { + "id": "h1s4a6hzxKrb" + } + }, + { + "cell_type": "code", + "source": [ + "from apache_beam.ml.transforms.utils import ArtifactsFetcher\n", + "artifact_fetcher = ArtifactsFetcher(artifact_location_with_frequency_threshold)\n", + "vocab_list = artifact_fetcher.get_vocab_list(vocab_filename='vocab_index_feature')\n", + "for i in range(len(vocab_list)):\n", + " print(f'{i}: {vocab_list[i]}')" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "rwgthpj9x8kp", + "outputId": "e5fcdd51-75c1-4da1-d5a4-d7f39129cc85" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "0: the\n", + "1: quick\n", + "2: in\n", + "3: fox\n", + "4: brown\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Use MLTransform for inference workloads\n", + "\n", + "When `MLTransform` is in `write` mode, it produces artifacts, such as vocabulary files for `ComputeAndApplyVocabulary`. These artifacts allow you to apply the same vocabulary, and any other preprocessing transforms, when you train your model and serve it in production, or when you test its accuracy.\n", + "\n", + "When `MLTransform` is used in `read` mode, it uses the previously generated vocabulary files to map the incoming text data. If an incoming token isn't found in the generated vocabulary, it's mapped to the `default_value` provided during `write` mode. In this case, the `default_value` is `-1`.\n" + ], + "metadata": { + "id": "PEqYpCexybO4" + } + }, + { + "cell_type": "code", + "source": [ + "test_documents = [\n", + " {'feature': 'wizards are flying in the sky'},\n", + " {'feature': 'I love dogs'}\n", + "]\n", + "\n", + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = pipeline | \"CreateData\" >> beam.Create(test_documents)\n", + " # Compute and apply vocabulary by using MLTransform.\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(read_artifact_location=artifact_location))\n", + "\n", + " transformed_pcoll | \"Print\" >> beam.Map(print)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "x-LRXUaTyX_B", + "outputId": "0e0b04df-a684-49c9-fe72-a032c4726078" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Row(feature=array([ 5, 18, -1, 2, 0, 6]))\n", + "Row(feature=array([-1, -1, 15]))\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "When you specify `read_artifact_location`, you don't have to pass any transforms to `MLTransform`. Instead, `MLTransform` loads the artifacts and the transforms that were saved in the location specified by `write_artifact_location`."
+ ], + "metadata": { + "id": "qwqH283F0kee" + } + } + ] +} diff --git a/examples/notebooks/beam-ml/data_preprocessing/huggingface_text_embeddings.ipynb b/examples/notebooks/beam-ml/data_preprocessing/huggingface_text_embeddings.ipynb new file mode 100644 index 000000000000..30bbadf5b9b0 --- /dev/null +++ b/examples/notebooks/beam-ml/data_preprocessing/huggingface_text_embeddings.ipynb @@ -0,0 +1,399 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "code", + "execution_count": 27, + "metadata": { + "id": "UmEFwsNs1OES" + }, + "outputs": [], + "source": [ + "# @title ###### Licensed to the Apache Software Foundation (ASF), Version 2.0 (the \"License\")\n", + "\n", + "# Licensed to the Apache Software Foundation (ASF) under one\n", + "# or more contributor license agreements. See the NOTICE file\n", + "# distributed with this work for additional information\n", + "# regarding copyright ownership. The ASF licenses this file\n", + "# to you under the Apache License, Version 2.0 (the\n", + "# \"License\"); you may not use this file except in compliance\n", + "# with the License. You may obtain a copy of the License at\n", + "#\n", + "# http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing,\n", + "# software distributed under the License is distributed on an\n", + "# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n", + "# KIND, either express or implied. See the License for the\n", + "# specific language governing permissions and limitations\n", + "# under the License" + ] + }, + { + "cell_type": "markdown", + "source": [ + "# Generate text embeddings by using Hugging Face Hub models\n", + "\n", + "\n", + " \n", + " \n", + "
\n", + " Run in Google Colab\n", + " \n", + " View source on GitHub\n", + "
\n" + ], + "metadata": { + "id": "ZUSiAR62SgO8" + } + }, + { + "cell_type": "markdown", + "source": [ + "\n", + "Use text embeddings to represent text as numerical vectors. This process lets computers understand and process text data, which is essential for many natural language processing (NLP) tasks.\n", + "\n", + "The following NLP tasks use embeddings:\n", + "\n", + "* **Semantic search:** Find documents or passages that are relevant to a query when the query doesn't use the exact same words as the documents.\n", + "* **Text classification:** Categorize text data into different classes, such as spam and not spam, or positive sentiment and negative sentiment.\n", + "* **Machine translation:** Translate text from one language to another and preserve the meaning.\n", + "* **Text summarization:** Create shorter summaries of text.\n", + "\n", + "This notebook uses Apache Beam's `MLTransform` to generate embeddings from text data.\n", + "\n", + "Hugging Face's [`SentenceTransformers`](https://huggingface.co/sentence-transformers) framework uses Python to generate sentence, text, and image embeddings.\n", + "\n", + "To generate text embeddings that use Hugging Face models and `MLTransform`, use the `SentenceTransformerEmbeddings` module to specify the model configuration.\n" + ], + "metadata": { + "id": "yvVIEhF01ZWq" + } + }, + { + "cell_type": "markdown", + "source": [ + "## Install dependencies\n", + "\n", + "Install Apache Beam and the dependencies needed to work with Hugging Face embeddings. The dependencies includes the `sentence-transformers` package, which is required to use the `SentenceTransformerEmbeddings` module.\n" + ], + "metadata": { + "id": "jqYXaBJ821Zs" + } + }, + { + "cell_type": "code", + "source": [ + "! pip install apache_beam>=2.53.0 --quiet\n", + "! pip install sentence-transformers --quiet" + ], + "metadata": { + "id": "shzCUrZI1XhF" + }, + "execution_count": 28, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "import tempfile\n", + "import apache_beam as beam\n", + "from apache_beam.ml.transforms.base import MLTransform\n", + "from apache_beam.ml.transforms.embeddings.huggingface import SentenceTransformerEmbeddings" + ], + "metadata": { + "id": "jVxSi2jS3M3b" + }, + "execution_count": 29, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "## Process the data\n", + "\n", + "`MLTransform` is a `PTransform` that you can use for data preparation, including generating text embeddings.\n", + "\n", + "### Use MLTransform in write mode\n", + "\n", + "In `write` mode, `MLTransform` saves the transforms and their attributes to an artifact location. Then, when you run `MLTransform` in `read` mode, these transforms are used. This process ensures that you're applying the same preprocessing steps when you train your model and when you serve the model in production or test its accuracy.\n", + "\n", + "For more information about using `MLTransform`, see [Preprocess data with MLTransform](https://beam.apache.org/documentation/ml/preprocess-data/) in the Apache Beam documentation." + ], + "metadata": { + "id": "kXDM8C7d3nPV" + } + }, + { + "cell_type": "markdown", + "source": [ + "### Get the data\n", + "\n", + "The following text inputs come from the Hugging Face blog [Getting Started With Embeddings](https://huggingface.co/blog/getting-started-with-embeddings).\n", + "\n", + "\n", + "`MLTransform` operates on dictionaries of data. 
To generate embeddings for specific columns, provide the column names as input to the `columns` argument in the `SentenceTransformerEmbeddings` transform." + ], + "metadata": { + "id": "Dbkmu3HP6Kql" + } + }, + { + "cell_type": "code", + "source": [ + "content = [\n", + " {'x': 'How do I get a replacement Medicare card?'},\n", + " {'x': 'What is the monthly premium for Medicare Part B?'},\n", + " {'x': 'How do I terminate my Medicare Part B (medical insurance)?'},\n", + " {'x': 'How do I sign up for Medicare?'},\n", + " {'x': 'Can I sign up for Medicare Part B if I am working and have health insurance through an employer?'},\n", + " {'x': 'How do I sign up for Medicare Part B if I already have Part A?'},\n", + " {'x': 'What are Medicare late enrollment penalties?'},\n", + " {'x': 'What is Medicare and who can get it?'},\n", + " {'x': 'How can I get help with my Medicare Part A and Part B premiums?'},\n", + " {'x': 'What are the different parts of Medicare?'},\n", + " {'x': 'Will my Medicare premiums be higher because of my higher income?'},\n", + " {'x': 'What is TRICARE ?'},\n", + " {'x': \"Should I sign up for Medicare Part B if I have Veterans' Benefits?\"}\n", + "]\n", + "\n", + "text_embedding_model_name = 'sentence-transformers/sentence-t5-large'\n", + "\n", + "\n", + "# Helper function that returns a dict containing only the first\n", + "# ten elements of the generated embeddings\n", + "def truncate_embeddings(d):\n", + " for key in d.keys():\n", + " d[key] = d[key][:10]\n", + " return d" + ], + "metadata": { + "id": "LCTUs8F73iDg" + }, + "execution_count": 30, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "\n", + "### Generate text embeddings\n", + "This example uses the model `sentence-transformers/sentence-t5-large` to generate text embeddings. The model uses only the encoder from a `T5-large` model. The weights are stored in FP16. For more information about the model, see [Sentence-T5: Scalable Sentence Encoders from Pre-trained Text-to-Text Models](https://arxiv.org/abs/2108.08877)."
+ ], + "metadata": { + "id": "SApMmlRLRv_e" + } + }, + { + "cell_type": "code", + "source": [ + "artifact_location_t5 = tempfile.mkdtemp(prefix='huggingface_')\n", + "embedding_transform = SentenceTransformerEmbeddings(\n", + " model_name=text_embedding_model_name, columns=['x'])\n", + "\n", + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = (\n", + " pipeline\n", + " | \"CreateData\" >> beam.Create(content))\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(write_artifact_location=artifact_location_t5).with_transform(embedding_transform))\n", + "\n", + " transformed_pcoll | beam.Map(truncate_embeddings) | 'LogOutput' >> beam.Map(print)\n", + "\n", + " transformed_pcoll | \"PrintEmbeddingShape\" >> beam.Map(lambda x: print(f\"Embedding shape: {len(x['x'])}\"))" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "SF6izkN134sf", + "outputId": "524d3506-d31f-4dee-9079-1ed6d7cadf1a" + }, + "execution_count": 31, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "{'x': [-0.0317193828523159, -0.005265399813652039, -0.012499183416366577, 0.00018130357784684747, -0.005592408124357462, 0.06207558885216713, -0.01656288281083107, 0.0167048592120409, -0.01239298190921545, 0.03041897714138031]}\n", + "Embedding shape: 10\n", + "{'x': [-0.015295305289328098, 0.005405726842582226, -0.015631258487701416, 0.022797023877501488, -0.027843449264764786, 0.03968179598450661, -0.004387892782688141, 0.022909151390194893, 0.01015392318367958, 0.04723235219717026]}\n", + "Embedding shape: 10\n", + "{'x': [-0.03450256213545799, -0.002632762538269162, -0.022460950538516045, -0.011689935810863972, -0.027329981327056885, 0.07293087989091873, -0.03069353476166725, 0.05429817736148834, -0.01308195199817419, 0.017668722197413445]}\n", + "Embedding shape: 10\n", + "{'x': [-0.02869587577879429, -0.0002648509689606726, -0.007186499424278736, -0.0003750955802388489, 0.012458174489438534, 0.06721009314060211, -0.013404129073023796, 0.03204648941755295, -0.021021844819188118, 0.04968355968594551]}\n", + "Embedding shape: 10\n", + "{'x': [-0.03241290897130966, 0.006845517549663782, 0.02001815102994442, -0.0057969288900494576, 0.008191823959350586, 0.08160955458879471, -0.009215254336595535, 0.023534387350082397, -0.02034241147339344, 0.0357462577521801]}\n", + "Embedding shape: 10\n", + "{'x': [-0.04592451825737953, -0.0025395643897354603, -0.01178023498505354, 0.011568977497518063, -0.0029014083556830883, 0.06971456110477448, -0.021167151629924774, 0.015902182087302208, -0.015007994137704372, 0.026213033124804497]}\n", + "Embedding shape: 10\n", + "{'x': [0.005221465136855841, -0.002127869985997677, -0.002369001042097807, -0.019337018951773643, 0.023243796080350876, 0.05599674955010414, -0.022721167653799057, 0.024813007563352585, -0.010685156099498272, 0.03624529018998146]}\n", + "Embedding shape: 10\n", + "{'x': [-0.035339221358299255, 0.010706206783652306, -0.001701260800473392, -0.00862252525985241, 0.006445988081395626, 0.08198338001966476, -0.022678885608911514, 0.01434261817485094, -0.008092232048511505, 0.03345781937241554]}\n", + "Embedding shape: 10\n", + "{'x': [-0.030748076736927032, 0.009340512566268444, -0.013637945055961609, 0.011183148249983788, -0.013879665173590183, 0.046350326389074326, -0.024090109393000603, 0.02885228954255581, -0.01699884608387947, 0.01672385260462761]}\n", + "Embedding shape: 10\n", + "{'x': [-0.040792081505060196, -0.00872269831597805, -0.015838179737329483, 
-0.03141209855675697, -7.104632823029533e-05, 0.08301416039466858, -0.034691162407398224, 0.0026397297624498606, 0.009255227632820606, 0.05415954813361168]}\n", + "Embedding shape: 10\n", + "{'x': [-0.02156883291900158, 0.003969342447817326, -0.030446071177721024, 0.008231461979448795, -0.01271845493465662, 0.03793857619166374, -0.013524272479116917, -0.0385628417134285, -0.0058258213102817535, 0.03505263477563858]}\n", + "Embedding shape: 10\n", + "{'x': [-0.027544165030121803, -0.01773364469408989, -0.013286487199366093, -0.008328652940690517, -0.011047529056668282, 0.05237515643239021, -0.016948163509368896, 0.02806701697409153, -0.0018120920285582542, 0.027241172268986702]}\n", + "Embedding shape: 10\n", + "{'x': [-0.03464886546134949, -0.003521248232573271, -0.010239562019705772, -0.018618224188685417, 0.004094886127859354, 0.062059685587882996, -0.013881963677704334, -0.0008639032603241503, -0.029874088242650032, 0.033531222492456436]}\n", + "Embedding shape: 10\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "You can pass additional arguments that are supported by `sentence-transformer` models, such as `convert_to_numpy=False`. These arguments are passed as a `dict` to the `SentenceTransformerEmbeddings` transform by using the `inference_args` parameter.\n", + "\n", + "When you pass `convert_to_numpy=False`, the output contains `torch.Tensor` matrices." + ], + "metadata": { + "id": "1MFom0PW_vRv" + } + }, + { + "cell_type": "code", + "source": [ + "artifact_location_t5_with_inference_args = tempfile.mkdtemp(prefix='huggingface_')\n", + "\n", + "embedding_transform = SentenceTransformerEmbeddings(\n", + " model_name=text_embedding_model_name, columns=['x'],\n", + " inference_args={'convert_to_numpy': False}\n", + " )\n", + "\n", + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = (\n", + " pipeline\n", + " | \"CreateData\" >> beam.Create(content))\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(write_artifact_location=artifact_location_t5_with_inference_args).with_transform(embedding_transform))\n", + "\n", + " # The outputs are in the PyTorch tensor type.\n", + " transformed_pcoll | 'LogOutput' >> beam.Map(lambda x: print(type(x['x'])))\n", + "\n", + " transformed_pcoll | \"PrintEmbeddingShape\" >> beam.Map(lambda x: print(f\"Embedding shape: {len(x['x'])}\"))\n" + ], + "metadata": { + "id": "xyezKuzY_uLD", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "96babb3b-c61b-40a2-96f7-572a2a46bd83" + }, + "execution_count": 24, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "<class 'torch.Tensor'>\n", + "Embedding shape: 768\n", + "<class 'torch.Tensor'>\n", + "Embedding shape: 768\n", + "<class 'torch.Tensor'>\n", + "Embedding shape: 768\n", + "<class 'torch.Tensor'>\n", + "Embedding shape: 768\n", + "<class 'torch.Tensor'>\n", + "Embedding shape: 768\n", + "<class 'torch.Tensor'>\n", + "Embedding shape: 768\n", + "<class 'torch.Tensor'>\n", + "Embedding shape: 768\n", + "<class 'torch.Tensor'>\n", + "Embedding shape: 768\n", + "<class 'torch.Tensor'>\n", + "Embedding shape: 768\n", + "<class 'torch.Tensor'>\n", + "Embedding shape: 768\n", + "<class 'torch.Tensor'>\n", + "Embedding shape: 768\n", + "<class 'torch.Tensor'>\n", + "Embedding shape: 768\n", + "<class 'torch.Tensor'>\n", + "Embedding shape: 768\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "### Use MLTransform in read mode\n", + "\n", + "In `read` mode, `MLTransform` uses the artifacts generated during `write` mode. In this case, the `SentenceTransformerEmbeddings` transform and its attributes are loaded from the saved artifacts.
You don't need to specify the artifacts again during `read` mode.\n", + "\n", + "In this way, `MLTransform` provides consistent preprocessing steps for training and inference workloads." + ], + "metadata": { + "id": "aPIQzCoF_EBj" + } + }, + { + "cell_type": "code", + "source": [ + "test_content = [\n", + " {\n", + " 'x': 'This is a test sentence'\n", + " },\n", + " {\n", + " 'x': 'The park is full of dogs'\n", + " },\n", + " {\n", + " 'x': \"Should I sign up for Medicare Part B if I have Veterans' Benefits?\"\n", + " }\n", + "]\n", + "\n", + "# Uses the T5 model to generate text embeddings\n", + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = (\n", + " pipeline\n", + " | \"CreateData\" >> beam.Create(test_content))\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(read_artifact_location=artifact_location_t5))\n", + "\n", + " transformed_pcoll | beam.Map(truncate_embeddings) | 'LogOutput' >> beam.Map(print)\n" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "RCqYeUd3_F3C", + "outputId": "4c5b61d2-df39-4af3-8520-f529f243e3b1" + }, + "execution_count": 26, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "{'x': [0.00036313451710157096, -0.03929319977760315, -0.03574873134493828, 0.05015222355723381, 0.04295048117637634, 0.04800170287489891, 0.006883862894028425, -0.02567591704428196, -0.048067063093185425, 0.036534328013658524]}\n", + "{'x': [-0.053793832659721375, 0.006730600260198116, -0.025130020454525948, 0.04363932088017464, 0.03323192894458771, 0.008803879842162132, -0.015412433072924614, 0.008926985785365105, -0.061175212264060974, 0.04573329910635948]}\n", + "{'x': [-0.03464885801076889, -0.003521254053339362, -0.010239563882350922, -0.018618224188685417, 0.004094892647117376, 0.062059689313173294, -0.013881963677704334, -0.000863900815602392, -0.029874078929424286, 0.03353121876716614]}\n" + ] + } + ] + } + ] +} diff --git a/examples/notebooks/beam-ml/data_preprocessing/scale_data.ipynb b/examples/notebooks/beam-ml/data_preprocessing/scale_data.ipynb new file mode 100644 index 000000000000..3c8946362a31 --- /dev/null +++ b/examples/notebooks/beam-ml/data_preprocessing/scale_data.ipynb @@ -0,0 +1,504 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "A7HK1Duzdnew" + }, + "outputs": [], + "source": [ + "# @title ###### Licensed to the Apache Software Foundation (ASF), Version 2.0 (the \"License\")\n", + "\n", + "# Licensed to the Apache Software Foundation (ASF) under one\n", + "# or more contributor license agreements. See the NOTICE file\n", + "# distributed with this work for additional information\n", + "# regarding copyright ownership. The ASF licenses this file\n", + "# to you under the Apache License, Version 2.0 (the\n", + "# \"License\"); you may not use this file except in compliance\n", + "# with the License. You may obtain a copy of the License at\n", + "#\n", + "# http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing,\n", + "# software distributed under the License is distributed on an\n", + "# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n", + "# KIND, either express or implied. 
See the License for the\n", + "# specific language governing permissions and limitations\n", + "# under the License" + ] + }, + { + "cell_type": "markdown", + "source": [ + "# Use MLTransform to scale data\n", + "\n", + "\n", + " \n", + " \n", + "
\n", + " Run in Google Colab\n", + " \n", + " View source on GitHub\n", + "
\n" + ], + "metadata": { + "id": "ZUSiAR62SgO8" + } + }, + { + "cell_type": "markdown", + "source": [ + "Scaling data is an important preprocessing step for training machine learning (ML) models, because it helps to ensure that all features have a similar weight or influence on the model. The following are benefits of scaling data:\n", + "\n", + "* **Improved convergence of gradient descent algorithms:** Many machine learning algorithms, such as linear regression and neural networks, use gradient descent to optimize their parameters. Gradient descent iteratively moves the parameters of the model in the direction that reduces the loss function. If the features aren't scaled, features with larger ranges can have a much larger impact on the gradient, making it difficult for the model to converge. Scaling the features helps to ensure that all features contribute equally to the gradient, which can lead to faster and more stable convergence.\n", + "\n", + "* **Uniformity in features:** If one feature has a much larger range than the other features, it can dominate the model and make it difficult for the model to learn from the other features. This lack of uniformity can cause poor performance and biased predictions. Scaling the features brings all of the features into a similar range.\n", + "\n", + "\n", + "To scale your dataset using Apache Beam, use `MLTransform` with one of the following transforms:\n", + "\n", + "* `ScaleTo01`: Calculates the minimum and maximum of an entire dataset, and then scales the dataset between 0 and 1 based on those minimum and maximum values.\n", + "* `ScaleToZScore`: Calculates the mean and variance of an entire dataset, and then scales the dataset based on those values.\n", + "* `ScaleByMinMax`: Scales the data in a dataset, taking minimum and maximum values as input parameters.\n", + "\n", + "For each data processing transform, `MLTransform` runs in both `write` mode and `read` mode. For more information about using `MLTransform`, see [Preprocess data with MLTransform](https://beam.apache.org/documentation/ml/preprocess-data/) in the Apache Beam documentation.\n", + "\n", + "## MLTransform in write mode\n", + "\n", + "When `MLTransform` is in `write` mode, it produces artifacts, such as minimum, maximum, and variance, for different data processing transforms. These artifacts ensure that you apply the same preprocessing steps when you train your model and serve it in production, or when you test its accuracy.\n", + "\n", + "## MLTransform in read mode\n", + "\n", + "In `read` mode, `MLTransform` uses the artifacts generated in `write` mode to scale the entire dataset." + ], + "metadata": { + "id": "F8-yTcjPUHLv" + } + }, + { + "cell_type": "markdown", + "source": [ + "## Import the required modules\n", + "\n", + "To use `MLTransform`, install `tensorflow_transform` and the Apache Beam SDK version 2.53.0 or later." + ], + "metadata": { + "id": "8qhhZTbIyqFb" + } + }, + { + "cell_type": "code", + "source": [ + "! pip install apache_beam>=2.53.0 --quiet\n", + "!
pip install tensorflow-transform --quiet" + ], + "metadata": { + "id": "cFP4CQksT_Zv" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "import os\n", + "import tempfile\n", + "import apache_beam as beam\n", + "from apache_beam.ml.transforms.base import MLTransform\n", + "from apache_beam.ml.transforms.tft import ScaleTo01\n", + "from apache_beam.ml.transforms.tft import ScaleByMinMax\n", + "from apache_beam.ml.transforms.tft import ScaleToZScore" + ], + "metadata": { + "id": "y-B7OZ0lyeee" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "artifact_location_scale_to_01 = tempfile.mkdtemp(prefix='scale_to_01_')\n", + "artifact_location_scale_to_zscore = tempfile.mkdtemp(prefix='scale_to_zscore_')\n", + "artifact_location_scale_by_min_max = tempfile.mkdtemp(prefix='scale_by_min_max_')" + ], + "metadata": { + "id": "UyBji8pSUEJt" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "# data used in MLTransform's write mode\n", + "data = [\n", + " {'int_feature_1' : 11, 'int_feature_2': -10},\n", + " {'int_feature_1': 34, 'int_feature_2': -33},\n", + " {'int_feature_1': 5, 'int_feature_2': -63},\n", + " {'int_feature_1': 12, 'int_feature_2': -38},\n", + " {'int_feature_1': 32, 'int_feature_2': -65},\n", + " {'int_feature_1': 63, 'int_feature_2': -21},\n", + "]\n", + "\n", + "# data used in MLTransform's read mode\n", + "test_data = [\n", + " {'int_feature_1': 29, 'int_feature_2': -20},\n", + " {'int_feature_1': -5, 'int_feature_2': -11},\n", + " {'int_feature_1': 5, 'int_feature_2': -44},\n", + " {'int_feature_1': 29, 'int_feature_2': -12},\n", + " {'int_feature_1': 20, 'int_feature_2': -53},\n", + " {'int_feature_1': 70, 'int_feature_2': -8}\n", + "]\n" + ], + "metadata": { + "id": "lNG0WI7Dy1dd" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "## Scale the data between 0 and 1\n", + "\n", + "Scale the data so that it's in the range of 0 to 1. To scale the data, the transform calculates minimum and maximum values on the whole dataset, and then performs the following calculation:\n", + "\n", + "`x = (x - x_min) / (x_max - x_min)`\n", + "\n", + "To scale the data, use the [ScaleTo01](https://beam.apache.org/releases/pydoc/current/apache_beam.ml.transforms.tft.html#apache_beam.ml.transforms.tft.ScaleTo01) data processing transform in `MLTransform`." 
+ ], + "metadata": { + "id": "MjMPgB1T0Wzu" + } + }, + { + "cell_type": "code", + "source": [ + "# MLTransform in write mode.\n", + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = pipeline | \"CreateData\" >> beam.Create(data)\n", + "\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(write_artifact_location=artifact_location_scale_to_01).with_transform(\n", + " ScaleTo01(columns=['int_feature_1', 'int_feature_2'])\n", + " )\n", + " )\n", + " transformed_pcoll | \"Print\" >> beam.Map(print)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 211 + }, + "id": "xuoLla2u0Cf_", + "outputId": "dab7f024-ca5e-4f2b-83b8-c09612c19450" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "application/javascript": [ + "\n", + " if (typeof window.interactive_beam_jquery == 'undefined') {\n", + " var jqueryScript = document.createElement('script');\n", + " jqueryScript.src = 'https://code.jquery.com/jquery-3.4.1.slim.min.js';\n", + " jqueryScript.type = 'text/javascript';\n", + " jqueryScript.onload = function() {\n", + " var datatableScript = document.createElement('script');\n", + " datatableScript.src = 'https://cdn.datatables.net/1.10.20/js/jquery.dataTables.min.js';\n", + " datatableScript.type = 'text/javascript';\n", + " datatableScript.onload = function() {\n", + " window.interactive_beam_jquery = jQuery.noConflict(true);\n", + " window.interactive_beam_jquery(document).ready(function($){\n", + " \n", + " });\n", + " }\n", + " document.head.appendChild(datatableScript);\n", + " };\n", + " document.head.appendChild(jqueryScript);\n", + " } else {\n", + " window.interactive_beam_jquery(document).ready(function($){\n", + " \n", + " });\n", + " }" + ] + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Row(int_feature_1=array([0.10344828], dtype=float32), int_feature_2=array([1.], dtype=float32))\n", + "Row(int_feature_1=array([0.5], dtype=float32), int_feature_2=array([0.58181816], dtype=float32))\n", + "Row(int_feature_1=array([0.], dtype=float32), int_feature_2=array([0.03636364], dtype=float32))\n", + "Row(int_feature_1=array([0.12068965], dtype=float32), int_feature_2=array([0.4909091], dtype=float32))\n", + "Row(int_feature_1=array([0.46551725], dtype=float32), int_feature_2=array([0.], dtype=float32))\n", + "Row(int_feature_1=array([1.], dtype=float32), int_feature_2=array([0.8], dtype=float32))\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "In this dataset, the following are the minimum and maximum values for the columns:\n", + "* `int_feature_1`: 5 and 63\n", + "* `int_feature_2`: -65 and -10\n", + "\n", + "In the output for the column `int_feature_1`, the data is scaled between 0 and 1 by using the values `5` and `63`. `5` is scaled to `0`, and `63` is scaled to `1`. The remaining values are scaled between 0 and 1 by using the formula `x = (x - x_min) / (x_max - x_min)`.\n",
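+ "\n",
+ "For example, for the first input row, `int_feature_1 = 11` is scaled to `(11 - 5) / (63 - 5) = 0.10344828`, which matches the first `Row` in the output."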
+ ], + "metadata": { + "id": "Hwvh4UyI23pz" + } + }, + { + "cell_type": "code", + "source": [ + "# MLTransform in read mode\n", + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = pipeline | \"CreateData\" >> beam.Create(test_data)\n", + "\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(read_artifact_location=artifact_location_scale_to_01)\n", + " )\n", + " transformed_pcoll | \"Print\" >> beam.Map(print)" + ], + "metadata": { + "id": "M8obCCCL1TM6", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "813bf5c2-6be7-49d3-d622-33f3f66fe2c9" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Row(int_feature_1=array([0.41379312], dtype=float32), int_feature_2=array([0.8181818], dtype=float32))\n", + "Row(int_feature_1=array([-0.1724138], dtype=float32), int_feature_2=array([0.9818182], dtype=float32))\n", + "Row(int_feature_1=array([0.], dtype=float32), int_feature_2=array([0.38181818], dtype=float32))\n", + "Row(int_feature_1=array([0.41379312], dtype=float32), int_feature_2=array([0.96363634], dtype=float32))\n", + "Row(int_feature_1=array([0.25862068], dtype=float32), int_feature_2=array([0.21818182], dtype=float32))\n", + "Row(int_feature_1=array([1.1206896], dtype=float32), int_feature_2=array([1.0363636], dtype=float32))\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "`MLTransform` learned in `write` mode that `int_feature_1` ranges from 5 to 63.\n", + "\n", + "In `read` mode, when it encounters 29 in `test_data` for `int_feature_1`, it scales it by using the following formula:\n", + "```\n", + "(value - min) / (max - min)\n", + "```\n", + "\n", + "The following calculation shows the formula with the values:\n", + "```\n", + "(29 - 5) / (63 - 5) = 0.41379312\n", + "```\n", + "\n", + "Twenty-nine is scaled based on the minimum and maximum values generated in `write` mode.\n" + ], + "metadata": { + "id": "jwSiMoAH4vq5" + } + }, + { + "cell_type": "markdown", + "source": [ + "## Scale by using the z-score\n", + "\n", + "Similar to `ScaleTo01`, use [ScaleToZScore](https://beam.apache.org/releases/pydoc/current/apache_beam.ml.transforms.tft.html#apache_beam.ml.transforms.tft.ScaleToZScore) to scale the values by using the [z-score](https://www.tensorflow.org/tfx/transform/api_docs/python/tft/scale_to_z_score#:~:text=Scaling%20to%20z%2Dscore%20subtracts%20out%20the%20mean%20and%20divides%20by%20standard%20deviation.%20Note%20that%20the%20standard%20deviation%20computed%20here%20is%20based%20on%20the%20biased%20variance%20(0%20delta%20degrees%20of%20freedom)%2C%20as%20computed%20by%20analyzers.var).\n" + ], + "metadata": { + "id": "VH1CmCGm_PtS" + } + }, + { + "cell_type": "code", + "source": [ + "# MLTransform in write mode\n", + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = pipeline | \"CreateData\" >> beam.Create(data)\n", + "\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(write_artifact_location=artifact_location_scale_to_zscore).with_transform(\n", + " ScaleToZScore(columns=['int_feature_1', 'int_feature_2'])\n", + " )\n", + " )\n", + " transformed_pcoll | \"Print\" >> beam.Map(print)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "OOrnLBbD_Qe4", + "outputId": "4754b0ac-dc40-4a55-c999-33270e785acc" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Row(int_feature_1=array([-0.76950264], 
dtype=float32), int_feature_2=array([1.401755], dtype=float32))\n", + "Row(int_feature_1=array([0.3974355], dtype=float32), int_feature_2=array([0.2638597], dtype=float32))\n", + "Row(int_feature_1=array([-1.0739213], dtype=float32), int_feature_2=array([-1.2203515], dtype=float32))\n", + "Row(int_feature_1=array([-0.7187662], dtype=float32), int_feature_2=array([0.01649117], dtype=float32))\n", + "Row(int_feature_1=array([0.2959626], dtype=float32), int_feature_2=array([-1.3192989], dtype=float32))\n", + "Row(int_feature_1=array([1.8687923], dtype=float32), int_feature_2=array([0.8575442], dtype=float32))\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "# MLTransform in read mode\n", + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = pipeline | \"CreateData\" >> beam.Create(test_data)\n", + "\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(read_artifact_location=artifact_location_scale_to_zscore)\n", + " )\n", + " transformed_pcoll | \"Print\" >> beam.Map(print)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "lLXGBaOtCRv-", + "outputId": "3ef4d142-0be2-4350-b2c4-3b676be74cee" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Row(int_feature_1=array([0.14375328], dtype=float32), int_feature_2=array([0.9070179], dtype=float32))\n", + "Row(int_feature_1=array([-1.5812857], dtype=float32), int_feature_2=array([1.3522812], dtype=float32))\n", + "Row(int_feature_1=array([-1.0739213], dtype=float32), int_feature_2=array([-0.28035107], dtype=float32))\n", + "Row(int_feature_1=array([0.14375328], dtype=float32), int_feature_2=array([1.3028076], dtype=float32))\n", + "Row(int_feature_1=array([-0.31287467], dtype=float32), int_feature_2=array([-0.7256144], dtype=float32))\n", + "Row(int_feature_1=array([2.2239475], dtype=float32), int_feature_2=array([1.5007024], dtype=float32))\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "## Scale by using ScaleByMinMax\n", + "\n", + "Use [ScaleByMinMax](https://beam.apache.org/releases/pydoc/current/apache_beam.ml.transforms.tft.html#apache_beam.ml.transforms.tft.ScaleByMinMax) to scale your data into the range of `[min_value, max_value]`.\n",
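+ "\n",
+ "This transform applies the same min-max scaling as `ScaleTo01` and then maps the result onto the requested range: `x_scaled = (x - x_min) / (x_max - x_min) * (max_value - min_value) + min_value`. For example, with `min_value=1` and `max_value=10` as in the next cell, the input value `11` maps to `0.10344828 * 9 + 1 ≈ 1.931`, which matches the first `Row` of the write-mode output below."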
+ ], + "metadata": { + "id": "McISy8QkC00v" + } + }, + { + "cell_type": "code", + "source": [ + "min_value = 1\n", + "max_value = 10\n", + "\n", + "# MLTransform in write mode\n", + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = pipeline | \"CreateData\" >> beam.Create(data)\n", + "\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(write_artifact_location=artifact_location_scale_by_min_max).with_transform(\n", + " ScaleByMinMax(columns=['int_feature_1', 'int_feature_2'], min_value=min_value, max_value=max_value)\n", + " )\n", + " )\n", + " transformed_pcoll | \"Print\" >> beam.Map(print)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "cpn_wKQ3Cxo0", + "outputId": "20be1e2b-36a2-4bae-dc53-5e5b5f1692be" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Row(int_feature_1=array([1.9310346], dtype=float32), int_feature_2=array([10.], dtype=float32))\n", + "Row(int_feature_1=array([5.5], dtype=float32), int_feature_2=array([6.2363634], dtype=float32))\n", + "Row(int_feature_1=array([1.], dtype=float32), int_feature_2=array([1.3272727], dtype=float32))\n", + "Row(int_feature_1=array([2.086207], dtype=float32), int_feature_2=array([5.418182], dtype=float32))\n", + "Row(int_feature_1=array([5.1896553], dtype=float32), int_feature_2=array([1.], dtype=float32))\n", + "Row(int_feature_1=array([10.], dtype=float32), int_feature_2=array([8.200001], dtype=float32))\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "# MLTransform in read mode\n", + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = pipeline | \"CreateData\" >> beam.Create(test_data)\n", + "\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(read_artifact_location=artifact_location_scale_by_min_max)\n", + " )\n", + " transformed_pcoll | \"Print\" >> beam.Map(print)" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "3aQjFPzBI7dS", + "outputId": "e9967889-27eb-4295-dfad-a285d670826c" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Row(int_feature_1=array([4.7241383], dtype=float32), int_feature_2=array([8.363636], dtype=float32))\n", + "Row(int_feature_1=array([-0.5517242], dtype=float32), int_feature_2=array([9.836364], dtype=float32))\n", + "Row(int_feature_1=array([1.], dtype=float32), int_feature_2=array([4.4363637], dtype=float32))\n", + "Row(int_feature_1=array([4.7241383], dtype=float32), int_feature_2=array([9.672727], dtype=float32))\n", + "Row(int_feature_1=array([3.3275862], dtype=float32), int_feature_2=array([2.9636364], dtype=float32))\n", + "Row(int_feature_1=array([11.086206], dtype=float32), int_feature_2=array([10.327272], dtype=float32))\n" + ] + } + ] + } + ] +} diff --git a/examples/notebooks/beam-ml/data_preprocessing/vertex_ai_text_embeddings.ipynb b/examples/notebooks/beam-ml/data_preprocessing/vertex_ai_text_embeddings.ipynb new file mode 100644 index 000000000000..ed4da57e297b --- /dev/null +++ b/examples/notebooks/beam-ml/data_preprocessing/vertex_ai_text_embeddings.ipynb @@ -0,0 +1,308 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "muiqKarukWj0" 
+ }, + "outputs": [], + "source": [ + "# @title ###### Licensed to the Apache Software Foundation (ASF), Version 2.0 (the \"License\")\n", + "\n", + "# Licensed to the Apache Software Foundation (ASF) under one\n", + "# or more contributor license agreements. See the NOTICE file\n", + "# distributed with this work for additional information\n", + "# regarding copyright ownership. The ASF licenses this file\n", + "# to you under the Apache License, Version 2.0 (the\n", + "# \"License\"); you may not use this file except in compliance\n", + "# with the License. You may obtain a copy of the License at\n", + "#\n", + "# http://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing,\n", + "# software distributed under the License is distributed on an\n", + "# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n", + "# KIND, either express or implied. See the License for the\n", + "# specific language governing permissions and limitations\n", + "# under the License" + ] + }, + { + "cell_type": "markdown", + "source": [ + "# Generate text embeddings by using the Vertex AI API\n", + "\n", + "\n", + " \n", + " \n", + "
\n", + " Run in Google Colab\n", + " \n", + " View source on GitHub\n", + "
\n" + ], + "metadata": { + "id": "ZUSiAR62SgO8" + } + }, + { + "cell_type": "markdown", + "source": [ + "Text embeddings are a way to represent text as numerical vectors. This process lets computers understand and process text data, which is essential for many natural language processing (NLP) tasks.\n", + "\n", + "The following NLP tasks use embeddings:\n", + "\n", + "* **Semantic search:** Find documents or passages that are relevant to a query when the query doesn't use the exact same words as the documents.\n", + "* **Text classification:** Categorize text data into different classes, such as spam and not spam, or positive sentiment and negative sentiment.\n", + "* **Machine translation:** Translate text from one language to another and preserve the meaning.\n", + "* **Text summarization:** Create shorter summaries of text.\n", + "\n", + "This notebook uses the Vertex AI text-embeddings API to generate text embeddings that use Google’s large generative artificial intelligence (AI) models. To generate text embeddings by using the Vertex AI text-embeddings API, use `MLTransform` with the `VertexAITextEmbeddings` class to specify the model configuration. For more information, see [Get text embeddings](https://cloud.google.com/vertex-ai/docs/generative-ai/embeddings/get-text-embeddings) in the Vertex AI documentation. \n", + "\n", + "For more information about using `MLTransform`, see [Preprocess data with MLTransform](https://beam.apache.org/documentation/ml/preprocess-data/) in the Apache Beam documentation.\n", + "\n", + "## Requirements\n", + "\n", + "To use the Vertex AI text-embeddings API, complete the following prerequisites:\n", + "\n", + "* Install the `google-cloud-aiplatform` Python package.\n", + "* Do one of the following tasks:\n", + " * Configure credentials for your Google Cloud project. For more information, see [Google Auth Library for Python](https://googleapis.dev/python/google-auth/latest/reference/google.auth.html#module-google.auth).\n", + " * Store the path to a service account JSON file by using the [GOOGLE_APPLICATION_CREDENTIALS](https://cloud.google.com/docs/authentication/application-default-credentials#GAC) environment variable." + ], + "metadata": { + "id": "bkpSCGCWlqAf" + } + }, + { + "cell_type": "markdown", + "source": [ + "To use your Google Cloud account, authenticate this notebook." + ], + "metadata": { + "id": "W29FgO5Qv2ew" + } + }, + { + "cell_type": "code", + "source": [ + "from google.colab import auth\n", + "auth.authenticate_user()\n", + "\n", + "project = '' # Replace with a valid Google Cloud project ID." + ], + "metadata": { + "id": "nYyyGYt3licq" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "## Install dependencies\n", + " Install Apache Beam and the dependencies required for the Vertex AI text-embeddings API." + ], + "metadata": { + "id": "UQROd16ZDN5y" + } + }, + { + "cell_type": "code", + "source": [ + "! 
pip install apache_beam[gcp]>=2.53.0 --quiet" + ], + "metadata": { + "id": "BTxob7d5DLBM" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "import tempfile\n", + "import apache_beam as beam\n", + "from apache_beam.ml.transforms.base import MLTransform\n", + "from apache_beam.ml.transforms.embeddings.vertex_ai import VertexAITextEmbeddings" + ], + "metadata": { + "id": "SkMhR7H6n1P0" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "## Transform the data\n", + "\n", + "`MLTransform` is a `PTransform` that you can use for data preparation, including generating text embeddings.\n", + "\n", + "### Use MLTransform in write mode\n", + "\n", + "In `write` mode, `MLTransform` saves the transforms and their attributes to an artifact location. Then, when you run `MLTransform` in `read` mode, these transforms are used. This process ensures that you're applying the same preprocessing steps when you train your model and when you serve the model in production or test its accuracy." + ], + "metadata": { + "id": "cokOaX2kzyke" + } + }, + { + "cell_type": "markdown", + "source": [ + "### Get the data\n", + "\n", + "`MLTransform` processes dictionaries that include column names and their associated text data. To generate embeddings for specific columns, specify these column names in the `columns` argument of `VertexAITextEmbeddings`. This transform uses the Vertex AI text-embeddings API for online predictions to generate an embedding vector for each sentence." + ], + "metadata": { + "id": "-x7fVvuy-aDs" + } + }, + { + "cell_type": "code", + "source": [ + "artifact_location = tempfile.mkdtemp(prefix='vertex_ai')\n", + "\n", + "# Use the latest text embedding model from the Vertex AI text-embeddings API documentation.\n", + "# https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/text-embeddings\n", + "text_embedding_model_name = 'textembedding-gecko@latest'\n", + "\n", + "# Generate text embeddings on the sentences.\n", + "content = [\n", + " {\n", + " 'x' : 'I would like embeddings for this text'\n", + " },\n", + " {\n", + " 'x' : 'Hello world'\n", + " },\n", + " {\n", + " 'x': 'The Dog is running in the park.'\n", + " }\n", + " ]\n", + "\n", + "# Helper function that returns a dict containing only the first\n", + "# ten elements of the generated embeddings.\n", + "def truncate_embeddings(d):\n", + " for key in d.keys():\n", + " d[key] = d[key][:10]\n", + " return d" + ], + "metadata": { + "id": "be-vR159pylF" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "embedding_transform = VertexAITextEmbeddings(\n", + " model_name=text_embedding_model_name, columns=['x'], project=project)\n", + "\n", + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = (\n", + " pipeline\n", + " | \"CreateData\" >> beam.Create(content))\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(write_artifact_location=artifact_location).with_transform(embedding_transform))\n", + "\n", + " # Show only the first ten elements of the embeddings to prevent clutter in the output.\n", + " transformed_pcoll | beam.Map(truncate_embeddings) | 'LogOutput' >> beam.Map(print)\n", + "\n", + " transformed_pcoll | \"PrintEmbeddingShape\" >> beam.Map(lambda x: print(f\"Embedding shape: {len(x['x'])}\"))" + ], + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "UQGm1be3p7lM", + "outputId": "b41172ca-1c73-4952-ca87-bfe45ca88a6c"
}, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "{'x': [0.041293490678071976, -0.010302993468940258, -0.048611514270305634, -0.01360565796494484, 0.06441926211118698, 0.022573700174689293, 0.016446372494101524, -0.033894773572683334, 0.004581860266625881, 0.060710687190294266]}\n", + "Embedding shape: 10\n", + "{'x': [0.05889148637652397, -0.0046180677600204945, -0.06738516688346863, -0.012708292342722416, 0.06461101770401001, 0.025648491457104683, 0.023468563333153725, -0.039828114211559296, -0.009968819096684456, 0.050098177045583725]}\n", + "Embedding shape: 10\n", + "{'x': [0.04683901369571686, -0.013076924718916416, -0.082594133913517, -0.01227626483887434, 0.00417641457170248, -0.024504298344254494, 0.04282262548804283, -0.0009824123699218035, -0.02860993705689907, 0.01609829254448414]}\n", + "Embedding shape: 10\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "source": [ + "### Use MLTransform in read mode\n", + "\n", + "In `read` mode, `MLTransform` uses the artifacts saved during `write` mode. In this example, the transform and its attributes are loaded from the saved artifacts. You don't need to specify artifacts again during `read` mode.\n", + "\n", + "In this way, `MLTransform` provides consistent preprocessing steps for training and inference workloads." + ], + "metadata": { + "id": "JLkmQkiLx_6h" + } + }, + { + "cell_type": "code", + "source": [ + "test_content = [\n", + " {\n", + " 'x': 'This is a test sentence'\n", + " },\n", + " {\n", + " 'x': 'The park is full of dogs'\n", + " },\n", + "]\n", + "\n", + "with beam.Pipeline() as pipeline:\n", + " data_pcoll = (\n", + " pipeline\n", + " | \"CreateData\" >> beam.Create(test_content))\n", + " transformed_pcoll = (\n", + " data_pcoll\n", + " | \"MLTransform\" >> MLTransform(read_artifact_location=artifact_location))\n", + "\n", + " transformed_pcoll | beam.Map(truncate_embeddings) | 'LogOutput' >> beam.Map(print)\n" + ], + "metadata": { + "id": "r8Y5vgfLx_Xu", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "e7cbf6b7-5c31-4efa-90cf-7a8a108ecc77" + }, + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "{'x': [0.04782044142484665, -0.010078949853777885, -0.05793016776442528, -0.026060665026307106, 0.05756739526987076, 0.02292264811694622, 0.014818413183093071, -0.03718176111578941, -0.005486017093062401, 0.04709304869174957]}\n", + "{'x': [0.042911216616630554, -0.007554919924587011, -0.08996245265007019, -0.02607591263949871, 0.0008614308317191899, -0.023671219125390053, 0.03999944031238556, -0.02983051724731922, -0.015057179145514965, 0.022963201627135277]}\n" + ] + } + ] + } + ] +} diff --git a/examples/notebooks/beam-ml/mltransform_basic.ipynb b/examples/notebooks/beam-ml/mltransform_basic.ipynb index 43068624eb82..b0af96d08593 100644 --- a/examples/notebooks/beam-ml/mltransform_basic.ipynb +++ b/examples/notebooks/beam-ml/mltransform_basic.ipynb @@ -55,7 +55,7 @@ "id": "d3b81cf2-8603-42bd-995e-9e14631effd0" }, "source": [ - "This notebook demonstrates how to use `MLTransform` to preprocess data for machine learning workflows. Apache Beam provides a set of transforms for preprocessing data for training and inference. The `MLTransform` class wraps various transforms in one PTransform, simplifying your workflow. 
For a list of available preprocessing transforms see the [Preprocess data with MLTransform](https://beam.apache.org/documentation/ml/preprocess-data/#transforms) page in Apache Beam documentation.\n", + "This notebook demonstrates how to use `MLTransform` to preprocess data for machine learning workflows. Apache Beam provides a set of transforms for preprocessing data for training and inference. The `MLTransform` class wraps various transforms in one `PTransform`, simplifying your workflow. For a list of available preprocessing transforms, see the [Preprocess data with MLTransform](https://beam.apache.org/documentation/ml/preprocess-data/#transforms) page in the Apache Beam documentation.\n", "\n", "This notebook uses data processing transforms defined in the [apache_beam/ml/transforms/tft](https://beam.apache.org/releases/pydoc/current/apache_beam.ml.transforms.tft.html) module." ] } ] diff --git a/examples/notebooks/beam-ml/per_key_models.ipynb b/examples/notebooks/beam-ml/per_key_models.ipynb index 53845c0b3e19..3e71c1d119a2 100644 --- a/examples/notebooks/beam-ml/per_key_models.ipynb +++ b/examples/notebooks/beam-ml/per_key_models.ipynb @@ -402,7 +402,7 @@ { "cell_type": "markdown", "source": [ - "Use the formatted keys to define a `KeyedModelHandler` that maps keys to the `ModelHandler` used for those keys. The `KeyedModelHandler` method lets you define an optional `max_models_per_worker_hint`, which limits the number of models that can be held in a single worker process at one time. If your worker might run out of memory, use this option. For more information about managing memory, see [Use a keyed ModelHandler](https://beam.apache.org/documentation/sdks/python-machine-learning/index.html#use-a-keyed-modelhandler)." + "Use the formatted keys to define a `KeyedModelHandler` that maps keys to the `ModelHandler` used for those keys. The `KeyedModelHandler` method lets you define an optional `max_models_per_worker_hint`, which limits the number of models that can be held in a single worker process at one time. If your worker might run out of memory, use this option. For more information about managing memory, see [Use a keyed ModelHandler](https://beam.apache.org/documentation/ml/about-ml/#use-a-keyed-modelhandler-object)." ], "metadata": { "id": "IP65_5nNGIb8" diff --git a/examples/notebooks/beam-ml/run_custom_inference.ipynb b/examples/notebooks/beam-ml/run_custom_inference.ipynb index a66c5847de0e..dde42399ff35 100644 --- a/examples/notebooks/beam-ml/run_custom_inference.ipynb +++ b/examples/notebooks/beam-ml/run_custom_inference.ipynb @@ -60,7 +60,7 @@ "NLP locates named entities in unstructured text and classifies the entities using pre-defined labels, such as person name, organization, date, and so on.\n", "\n", "This example illustrates how to use the popular `spaCy` package to load a machine learning (ML) model and perform inference in an Apache Beam pipeline using the RunInference `PTransform`.\n", - "For more information about the RunInference API, see [Machine Learning](https://beam.apache.org/documentation/sdks/python-machine-learning) in the Apache Beam documentation." + "For more information about the RunInference API, see [About Beam ML](https://beam.apache.org/documentation/ml/about-ml) in the Apache Beam documentation."
] }, { @@ -104,7 +104,7 @@ "\n", "To learn more about `spaCy`, create a `spaCy` language object in memory using `spaCy`'s trained models.\n", "You can install these models as Python packages.\n", - "For more inforamtion, see spaCy's [Models and Languages](https://spacy.io/usage/models) documentation." + "For more information, see spaCy's [Models and Languages](https://spacy.io/usage/models) documentation." ] }, { diff --git a/examples/notebooks/beam-ml/run_inference_pytorch_tensorflow_sklearn.ipynb b/examples/notebooks/beam-ml/run_inference_pytorch_tensorflow_sklearn.ipynb index a314f6cd7116..115b70b11e94 100644 --- a/examples/notebooks/beam-ml/run_inference_pytorch_tensorflow_sklearn.ipynb +++ b/examples/notebooks/beam-ml/run_inference_pytorch_tensorflow_sklearn.ipynb @@ -71,7 +71,7 @@ "You can use Apache Beam versions 2.40.0 and later with the [RunInference API](https://beam.apache.org/releases/pydoc/current/apache_beam.ml.inference.base.html#apache_beam.ml.inference.base.RunInference) for local and remote inference with batch and streaming pipelines.\n", "The RunInference API leverages Apache Beam concepts, such as the `BatchElements` transform and the `Shared` class, to support models in your pipelines that create transforms optimized for machine learning inference.\n", "\n", - "For more information about the RunInference API, see [Machine Learning](https://beam.apache.org/documentation/sdks/python-machine-learning) in the Apache Beam documentation." + "For more information about the RunInference API, see [About Beam ML](https://beam.apache.org/documentation/ml/about-ml) in the Apache Beam documentation." ] }, { @@ -221,7 +221,7 @@ " # Pad the token tensors to max length to make sure that all of the tensors\n", " # are of the same length and stack-able by the RunInference API. Normally, you would batch first\n", " # then tokenize the batch, padding each tensor the max length in the batch.\n", - " # See: https://beam.apache.org/documentation/sdks/python-machine-learning/#unable-to-batch-tensor-elements\n", + " # See: https://beam.apache.org/documentation/ml/about-ml/#unable-to-batch-tensor-elements\n", " tokens = self._tokenizer(text_input, return_tensors='pt', padding='max_length', max_length=512)\n", " # Squeeze because tokenization adds an extra dimension, which is empty,\n", " # in this case because we tokenize one element at a time.\n", diff --git a/examples/notebooks/beam-ml/speech_emotion_tensorflow.ipynb b/examples/notebooks/beam-ml/speech_emotion_tensorflow.ipynb index 098cb150bfd2..c2dfb06a6e67 100644 --- a/examples/notebooks/beam-ml/speech_emotion_tensorflow.ipynb +++ b/examples/notebooks/beam-ml/speech_emotion_tensorflow.ipynb @@ -132,7 +132,7 @@ "* **[IPython](https://ipython.readthedocs.io/en/stable/index.html)**: Creates visualizations for multimedia content. Here we have used it for playing audio files.\n", "* **[Sklearn](https://scikit-learn.org/stable/index.html)**: Offers comprehensive tools for Machine Learning. Here we have used it for preprocessing and splitting the data.\n", "* **[TensorFlow](https://www.tensorflow.org/api_docs)** and **[Keras](https://keras.io/api/)**: Enables building and training complex Machine Learning and Deep Learning model.\n", - "* **[TFModelHandlerNumpy](https://beam.apache.org/documentation/sdks/python-machine-learning/#tensorflow)**: Defines the configuration used to load/use the model that we train. 
We use TFModelHandlerNumpy because the model was trained with TensorFlow and takes numpy arrays as input.\n", + "* **[TFModelHandlerNumpy](https://beam.apache.org/documentation/ml/about-ml/#tensorflow)**: Defines the configuration used to load/use the model that we train. We use TFModelHandlerNumpy because the model was trained with TensorFlow and takes numpy arrays as input.\n", "* **[RunInference](https://beam.apache.org/releases/pydoc/current/apache_beam.ml.inference.html#apache_beam.ml.inference.RunInference)**: Loads the model and obtains predictions as part of the Apache Beam pipeline. For more information, see docs on prediction and inference.\n", "* **[Apache Beam](https://beam.apache.org/documentation/)**: Builds a pipeline for Image Processing." ] diff --git a/gradle.properties b/gradle.properties index ef84ea9a5cc9..8f8736a100f6 100644 --- a/gradle.properties +++ b/gradle.properties @@ -30,8 +30,8 @@ signing.gnupg.useLegacyGpg=true # buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy. # To build a custom Beam version make sure you change it in both places, see # https://github.com/apache/beam/issues/21302. -version=2.53.0-SNAPSHOT -sdk_version=2.53.0.dev +version=2.54.0-SNAPSHOT +sdk_version=2.54.0.dev javaVersion=1.8 diff --git a/it/common/src/main/java/org/apache/beam/it/common/utils/PipelineUtils.java b/it/common/src/main/java/org/apache/beam/it/common/utils/PipelineUtils.java index d249d43d3789..e53de5bdb678 100644 --- a/it/common/src/main/java/org/apache/beam/it/common/utils/PipelineUtils.java +++ b/it/common/src/main/java/org/apache/beam/it/common/utils/PipelineUtils.java @@ -22,6 +22,8 @@ import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.function.Supplier; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.apache.beam.sdk.PipelineResult; import org.apache.beam.sdk.PipelineRunner; import org.apache.beam.sdk.options.PipelineOptions; @@ -115,7 +117,13 @@ public static String createJobName(String prefix, int randomChars) { /** Get raw job name (without prefix) from a jobName generated by createJobName. 
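Assumes the job name ends with a 17-digit timestamp suffix ("-yyyyMMddHHmmssSSS"), optionally followed by additional random characters; everything before that suffix is returned.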
*/ public static String extractJobName(String nameWithPrefix) { - return nameWithPrefix.substring(0, nameWithPrefix.lastIndexOf("-")); + Pattern pattern = Pattern.compile("-\\d{17}"); // yyyyMMddHHmmssSSS + Matcher matcher = pattern.matcher(nameWithPrefix); + if (matcher.find()) { + return nameWithPrefix.substring(0, matcher.start()); + } else { + throw new IllegalArgumentException("Unexpected job name: " + nameWithPrefix); + } } /* diff --git a/it/common/src/test/java/org/apache/beam/it/common/utils/PipelineUtilsTest.java b/it/common/src/test/java/org/apache/beam/it/common/utils/PipelineUtilsTest.java index 316283cdf7d2..d2b8334964f5 100644 --- a/it/common/src/test/java/org/apache/beam/it/common/utils/PipelineUtilsTest.java +++ b/it/common/src/test/java/org/apache/beam/it/common/utils/PipelineUtilsTest.java @@ -51,4 +51,10 @@ public void testCreateExtractJobName() { String name = "create-job-name"; assertEquals(name, extractJobName(createJobName(name))); } + + @Test + public void testCreateExtractJobNameWithRandomChars() { + String name = "create-job-name"; + assertEquals(name, extractJobName(createJobName(name, 8))); + } } diff --git a/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/dataflow/DefaultPipelineLauncher.java b/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/dataflow/DefaultPipelineLauncher.java index 620d24d4e117..e0f9f6c2f63d 100644 --- a/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/dataflow/DefaultPipelineLauncher.java +++ b/it/google-cloud-platform/src/main/java/org/apache/beam/it/gcp/dataflow/DefaultPipelineLauncher.java @@ -410,9 +410,9 @@ protected LaunchInfo.Builder getJobInfoBuilder(LaunchConfig options, JobState st // config pipelineName String pipelineName = PipelineUtils.extractJobName(options.jobName()); String overrideName = null; - if (pipelineName.endsWith("write")) { + if (pipelineName.startsWith("write")) { overrideName = System.getProperty(WRITE_PIPELINE_NAME_OVERWRITE); - } else if (pipelineName.endsWith("read")) { + } else if (pipelineName.startsWith("read")) { overrideName = System.getProperty(READ_PIPELINE_NAME_OVERWRITE); } if (!Strings.isNullOrEmpty(overrideName)) { diff --git a/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/bigquery/BigQueryStreamingLT.java b/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/bigquery/BigQueryStreamingLT.java index 4589f79f1aaa..214d2745f03e 100644 --- a/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/bigquery/BigQueryStreamingLT.java +++ b/it/google-cloud-platform/src/test/java/org/apache/beam/it/gcp/bigquery/BigQueryStreamingLT.java @@ -428,14 +428,14 @@ public CrashingBigQueryServices(Integer crashIntervalSeconds) { } @Override - public DatasetService getDatasetService(BigQueryOptions options) { - return new CrashingDatasetService(options); + public WriteStreamService getWriteStreamService(BigQueryOptions options) { + return new CrashingWriteStreamService(options); } - private class CrashingDatasetService extends BigQueryServicesImpl.DatasetServiceImpl { + private class CrashingWriteStreamService extends BigQueryServicesImpl.WriteStreamServiceImpl { private Instant lastCrash; - public CrashingDatasetService(BigQueryOptions bqOptions) { + public CrashingWriteStreamService(BigQueryOptions bqOptions) { super(bqOptions); } diff --git a/it/jdbc/src/main/java/org/apache/beam/it/jdbc/AbstractJDBCResourceManager.java b/it/jdbc/src/main/java/org/apache/beam/it/jdbc/AbstractJDBCResourceManager.java index 6d50dddb0ccd..73c55d001381 100644 --- 
a/it/jdbc/src/main/java/org/apache/beam/it/jdbc/AbstractJDBCResourceManager.java +++ b/it/jdbc/src/main/java/org/apache/beam/it/jdbc/AbstractJDBCResourceManager.java @@ -99,11 +99,15 @@ public String getPassword() { return password; } + public int getPort() { + return this.getPort(getJDBCPort()); + } + @Override public synchronized String getUri() { return String.format( "jdbc:%s://%s:%d/%s", - getJDBCPrefix(), this.getHost(), this.getPort(getJDBCPort()), this.getDatabaseName()); + getJDBCPrefix(), this.getHost(), this.getPort(), this.getDatabaseName()); } public abstract String getJDBCPrefix(); diff --git a/learning/prompts/code-explaination/02_io_pubsub.md b/learning/prompts/code-explaination/02_io_pubsub.md new file mode 100644 index 000000000000..de02bd58e4d8 --- /dev/null +++ b/learning/prompts/code-explaination/02_io_pubsub.md @@ -0,0 +1,33 @@ +Prompt: +What does this code do? +```python +pipeline_options = PipelineOptions(pipeline_args) + +with beam.Pipeline(options=pipeline_options) as p: + (p | "Read" >> ReadFromPubSub(subscription=input_subscription) + | "Process" >> ParDo(ProcessMessage()) + | "Write" >> WriteToPubSub(topic=output_topic)) +``` +Response: +This code reads messages from the [Pub/Sub](https://cloud.google.com/pubsub/docs/overview) subscription `input_subscription` by using the `ReadFromPubSub` transform and processes them. The code then writes the resulting collection to the Pub/Sub topic `output_topic` using the `WriteToPubSub` transform. + +Pub/Sub is only supported in streaming pipelines. + +Reading messages directly from a topic is also supported. In this case, a temporary subscription will be created automatically. + +`ReadFromPubSub` produces a `PCollection` of `PubsubMessage` objects or a `PCollection` of byte sequences. Behavior is controlled by the `with_attributes` parameter, with byte sequences being the default. +For more information about PCollections, see the [Beam Programming Guide](https://beam.apache.org/documentation/basics/#pcollection). + +The `ProcessMessage` class processes the messages. This class is a subclass of the `DoFn` class. +The simplest implementation of the `ProcessMessage` class might look like the following example: + +```python +class ProcessMessage(beam.DoFn): + def process(self, element): + yield element +``` +For more information about the `DoFn` class, see the [Beam Programming Guide](https://beam.apache.org/documentation/programming-guide/#dofn). + +For more details, see the [Pub/Sub I/O transform documentation](https://beam.apache.org/releases/pydoc/current/apache_beam.io.gcp.pubsub.html). + +For a common pattern for configuring pipeline options, see the [Pipeline option pattern](https://beam.apache.org/documentation/patterns/pipeline-options/). \ No newline at end of file diff --git a/learning/prompts/code-generation/02_io_pubsub.md b/learning/prompts/code-generation/02_io_pubsub.md new file mode 100644 index 000000000000..90856e8956ff --- /dev/null +++ b/learning/prompts/code-generation/02_io_pubsub.md @@ -0,0 +1,51 @@ +Prompt: +Write the Python code to read messages from a Pub/Sub subscription. +Response: +You can read messages from a Pub/Sub subscription or topic using the `ReadFromPubSub` transform. Pub/Sub is only supported in streaming pipelines. + +The following Python code reads messages from a Pub/Sub subscription. The subscription is provided as a command line argument.
The messages are logged to the console: + +```python +import logging + +import apache_beam as beam +from apache_beam import Map +from apache_beam.io import ReadFromPubSub +from apache_beam.options.pipeline_options import PipelineOptions + +class PubSubReadOptions(PipelineOptions): + """Configure pipeline options for the Pub/Sub read transform.""" + @classmethod + def _add_argparse_args(cls, parser): + parser.add_argument( + "--subscription", + required=True, + help="Pub/Sub subscription to read from.") + +def read_subscription(): + """Read from a Pub/Sub subscription.""" + + # Parse the pipeline options. + # For streaming pipelines, you need to include streaming=True. + options = PubSubReadOptions(streaming=True) + + with beam.Pipeline(options=options) as p: + # This pipeline reads from a Pub/Sub subscription and logs the messages to the console. + (p | "Read Pub/Sub subscription" >> ReadFromPubSub(subscription=options.subscription) + | "Format message" >> Map(lambda message: f"Received message:\n{message}\n") + | Map(logging.info)) + +if __name__ == '__main__': + logging.getLogger().setLevel(logging.INFO) + read_subscription() +``` +Reading messages directly from a topic is also supported. A temporary subscription will be created automatically. + +The messages can be returned as byte strings or as `PubsubMessage` objects. This behavior is controlled by the `with_attributes` parameter. + +For more information, see the [Pub/Sub I/O transform documentation](https://beam.apache.org/releases/pydoc/current/apache_beam.io.gcp.pubsub.html). + +For a common pattern for configuring pipeline options, see the [Pipeline option pattern](https://beam.apache.org/documentation/patterns/pipeline-options/). \ No newline at end of file diff --git a/learning/prompts/documentation-lookup/01_basic_learning_apache_beam.md b/learning/prompts/documentation-lookup/01_basic_learning_apache_beam.md new file mode 100644 index 000000000000..df428d910388 --- /dev/null +++ b/learning/prompts/documentation-lookup/01_basic_learning_apache_beam.md @@ -0,0 +1,14 @@ +Prompt: +What is Apache Beam? +Response: +Apache Beam is an open-source unified programming model for data processing pipelines. It provides a set of language-specific SDKs that allow you to define and execute data processing workflows. +Apache Beam allows you to write pipelines in one of the supported languages (Java, Python, Go, and Scala). The pipelines can be executed on one of the supported runners (Apache Flink, Apache Spark, and Google Cloud Dataflow). +Learning Apache Beam is a valuable skill for data processing and analysis. Here are some resources to get you started: +The [Get Started](https://beam.apache.org/get-started/) section of the Apache Beam website is a perfect place to start. It provides an overview of Apache Beam and its key concepts. +For a hands-on introduction, you can start by exploring the [WordCount example](https://beam.apache.org/get-started/wordcount-example/), which demonstrates the basic concepts of Apache Beam. +The [Tour of Beam](https://tour.beam.apache.org/) is an interactive tour that teaches you core Beam concepts. It provides a sandbox environment to write and run pipelines while exploring different topics. +The [Beam Playground](https://play.beam.apache.org/) offers an interactive space to experiment with Beam's transforms and examples without installing anything on your system. After you have a basic understanding of Apache Beam, you can start to build your own pipelines.
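+For illustration, here is a minimal runnable pipeline (a sketch that uses arbitrary example data rather than an example from the resources above):
+```python
+import apache_beam as beam
+
+# Create a small in-memory PCollection, transform it, and print the results.
+with beam.Pipeline() as pipeline:
+  (pipeline
+   | "Create" >> beam.Create(["hello", "beam"])
+   | "Uppercase" >> beam.Map(str.upper)
+   | "Print" >> beam.Map(print))
+```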
+The Apache Beam website has a number of resources that can help you get started, including the [Apache Beam Documentation](https://beam.apache.org/documentation/) and the [Apache Beam Community](https://beam.apache.org/community/). +For those interested in the source code, you can explore the [Apache Beam repository](https://github.com/apache/beam) on GitHub. +By leveraging these resources, you can build a strong foundation in Apache Beam and start constructing your own data processing pipelines. +Additionally, Apache Beam is an open-source project, and contributions are welcome. Whether you're fixing bugs, improving documentation, or adding new features, your work can benefit the entire Beam community. To learn more about how you can contribute, visit the [Contribute](https://beam.apache.org/contribute/#) section of the Apache Beam website. \ No newline at end of file diff --git a/learning/prompts/documentation-lookup/02_basic_pipelines.md b/learning/prompts/documentation-lookup/02_basic_pipelines.md new file mode 100644 index 000000000000..873329b821b7 --- /dev/null +++ b/learning/prompts/documentation-lookup/02_basic_pipelines.md @@ -0,0 +1,19 @@ +Prompt: +What is a Pipeline in Apache Beam? +Response: +In Apache Beam, a [pipeline](https://beam.apache.org/documentation/pipelines/design-your-pipeline/) encapsulates the entirety of a data processing task, including all the data and each step of the process. Essentially, it's a [Directed Acyclic Graph](https://en.wikipedia.org/wiki/Directed_acyclic_graph) of transformations (known as `PTransform`) applied to data collections (`PCollection`). + +The simplest pipelines in Apache Beam follow a linear flow of operations, typically adhering to a read-process-write pattern. However, pipelines can also be significantly more complex, featuring multiple input sources, output sinks, and transforms that can both read from and output to multiple `PCollection` objects. +For more information about pipeline design and best practices, see [Common Pipeline Patterns](https://beam.apache.org/documentation/patterns/overview/). + +To use Beam, your driver program must first create an instance of the Beam SDK class `Pipeline` (typically in the `main()` function). + +```python +import apache_beam as beam + +with beam.Pipeline() as pipeline: + pass # Build your pipeline here. +``` + +When you create your pipeline, you need to set [configuration options](https://beam.apache.org/documentation/programming-guide/#configuring-pipeline-options). You can set your pipeline’s configuration options programmatically. You can also set the options ahead of time, or read them from the command line, and then pass them to the `Pipeline` object when you create the object. This approach is often easier than setting the pipeline configuration options programmatically. + diff --git a/learning/prompts/documentation-lookup/03_basic_configuring_pipelines.md b/learning/prompts/documentation-lookup/03_basic_configuring_pipelines.md new file mode 100644 index 000000000000..bf5ac7fabf81 --- /dev/null +++ b/learning/prompts/documentation-lookup/03_basic_configuring_pipelines.md @@ -0,0 +1,29 @@ +Prompt: +How do I configure a pipeline in Apache Beam? +Response: +Configuring pipeline options in Apache Beam is crucial for specifying the execution environment, managing resources, and tailoring the pipeline's behavior to meet specific data processing needs and performance requirements. + +You can set pipeline options programmatically or pass them in using the command line.
These options include runner-specific settings, job name, project ID (for cloud runners), machine types, number of workers, and more. + +Apache Beam offers a variety of [standard pipeline options](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/options/pipeline_options.py) that allow you to customize and optimize your data processing pipelines. + +Beam SDKs include a command-line parser that you can use to set pipeline options. Use command-line arguments in the format `--