From 713746746a5bbee9e01d6e2677bc7eb4413a5895 Mon Sep 17 00:00:00 2001
From: He Wang
Date: Wed, 23 Mar 2022 01:38:26 +0800
Subject: [PATCH] [build][oceanbase] Use self-hosted agent for oceanbase cdc tests

---
 azure-pipelines.yml                                |   7 +
 .../jobs-template-for-self-hosted-agent.yml        | 121 ++++++++++++++++++
 tools/ci/stage.sh                                  |  15 ++-
 3 files changed, 142 insertions(+), 1 deletion(-)
 create mode 100644 tools/azure-pipelines/jobs-template-for-self-hosted-agent.yml

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 742032bb0fb..ce8d29cd6d9 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -47,3 +47,10 @@ stages:
         vmImage: 'ubuntu-20.04'
       run_end_to_end: false
       jdk: 8
+  - template: tools/azure-pipelines/jobs-template-for-self-hosted-agent.yml
+    parameters: # see template file for a definition of the parameters.
+      stage_name: ci_build_on_self_hosted_agent
+      test_pool_definition:
+        name: Flink_CDC_CI
+      run_end_to_end: false
+      jdk: 8
diff --git a/tools/azure-pipelines/jobs-template-for-self-hosted-agent.yml b/tools/azure-pipelines/jobs-template-for-self-hosted-agent.yml
new file mode 100644
index 00000000000..f2bd0db7f27
--- /dev/null
+++ b/tools/azure-pipelines/jobs-template-for-self-hosted-agent.yml
@@ -0,0 +1,121 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+parameters:
+  test_pool_definition: # defines the hardware pool for compilation and unit test execution.
+  stage_name: # defines a unique identifier for all jobs in a stage (in case the jobs are added multiple times to a stage)
+  run_end_to_end: # if set to 'true', the end-to-end tests will be executed
+  jdk: # the jdk version to use
+
+jobs:
+  - job: compile_${{parameters.stage_name}}
+    # succeeded() is needed to allow job cancellation
+    condition: and(succeeded(), not(eq(variables['MODE'], 'e2e')))
+    pool: ${{parameters.test_pool_definition}}
+    timeoutInMinutes: 240
+    cancelTimeoutInMinutes: 1
+    workspace:
+      clean: all # this cleans the entire workspace directory before running a new job
+      # It is necessary because the custom build machines are reused for tests.
+      # See also https://docs.microsoft.com/en-us/azure/devops/pipelines/process/phases?view=azure-devops&tabs=yaml#workspace
+
+    steps:
+      # The cache task persists the .m2 directory between builds, so that
+      # we do not have to re-download all dependencies from maven central for
+      # each build. The hope is that downloading the cache is faster than
+      # downloading all dependencies individually.
+      # In this configuration, we use a hash over all committed (not generated) .pom files
+      # as a key for the build cache (CACHE_KEY). If we have a cache miss on the hash
+      # (usually because a pom file has changed), we'll fall back to a key without
+      # the pom files (CACHE_FALLBACK_KEY).
+      # Official documentation of the Cache task: https://docs.microsoft.com/en-us/azure/devops/pipelines/caching/?view=azure-devops
+      - task: Cache@2
+        inputs:
+          key: $(CACHE_KEY)
+          restoreKeys: $(CACHE_FALLBACK_KEY)
+          path: $(MAVEN_CACHE_FOLDER)
+        continueOnError: true # continue the build even if the cache fails.
+        displayName: Cache Maven local repo
+      - script: |
+          echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_${{parameters.jdk}}_X64"
+          echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_${{parameters.jdk}}_X64/bin:$PATH"
+        displayName: "Set JDK"
+      # Compile
+      - script: |
+          ./tools/ci/compile.sh || exit $?
+          ./tools/azure-pipelines/create_build_artifact.sh
+        displayName: Compile
+
+      # upload artifacts for next stage
+      - task: PublishPipelineArtifact@1
+        inputs:
+          targetPath: $(FLINK_ARTIFACT_DIR)
+          artifact: FlinkCompileArtifact-${{parameters.stage_name}}
+
+  - job: test_${{parameters.stage_name}}
+    dependsOn: compile_${{parameters.stage_name}}
+    condition: and(succeeded(), not(eq(variables['MODE'], 'e2e')))
+    pool: ${{parameters.test_pool_definition}}
+    timeoutInMinutes: 240
+    cancelTimeoutInMinutes: 1
+    workspace:
+      clean: all
+    strategy:
+      matrix:
+        oceanbase:
+          module: oceanbase
+    steps:
+      # download artifact from compile stage
+      - task: DownloadPipelineArtifact@2
+        inputs:
+          path: $(FLINK_ARTIFACT_DIR)
+          artifact: FlinkCompileArtifact-${{parameters.stage_name}}
+
+      - script: ./tools/azure-pipelines/unpack_build_artifact.sh
+        displayName: "Unpack Build artifact"
+
+      - task: Cache@2
+        inputs:
+          key: $(CACHE_KEY)
+          restoreKeys: $(CACHE_FALLBACK_KEY)
+          path: $(MAVEN_CACHE_FOLDER)
+        continueOnError: true # continue the build even if the cache fails.
+        condition: not(eq('${{parameters.test_pool_definition.name}}', 'Default'))
+        displayName: Cache Maven local repo
+
+      - script: |
+          echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_${{parameters.jdk}}_X64"
+          echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_${{parameters.jdk}}_X64/bin:$PATH"
+        displayName: "Set JDK"
+
+      - script: sudo sysctl -w kernel.core_pattern=core.%p
+        displayName: Set coredump pattern
+
+      # Test
+      - script: ./tools/azure-pipelines/uploading_watchdog.sh ./tools/ci/test_controller.sh $(module)
+        displayName: Test - $(module)
+
+      - task: PublishTestResults@2
+        condition: succeededOrFailed()
+        inputs:
+          testResultsFormat: 'JUnit'
+
+      # upload debug artifacts
+      - task: PublishPipelineArtifact@1
+        condition: not(eq('$(DEBUG_FILES_OUTPUT_DIR)', ''))
+        displayName: Upload Logs
+        inputs:
+          targetPath: $(DEBUG_FILES_OUTPUT_DIR)
+          artifact: logs-${{parameters.stage_name}}-$(DEBUG_FILES_NAME)
diff --git a/tools/ci/stage.sh b/tools/ci/stage.sh
index 767fc5b2414..d21df0c9286 100755
--- a/tools/ci/stage.sh
+++ b/tools/ci/stage.sh
@@ -22,6 +22,7 @@ STAGE_ORACLE="oracle"
 STAGE_MONGODB="mongodb"
 STAGE_SQLSERVER="sqlserver"
 STAGE_TIDB="tidb"
+STAGE_OCEANBASE="oceanbase"
 STAGE_E2E="e2e"
 STAGE_MISC="misc"
 
@@ -49,6 +50,10 @@ MODULES_TIDB="\
 flink-connector-tidb-cdc,\
 flink-sql-connector-tidb-cdc"
 
+MODULES_OCEANBASE="\
+flink-connector-oceanbase-cdc,\
+flink-sql-connector-oceanbase-cdc"
+
 MODULES_E2E="\
 flink-cdc-e2e-tests"
 
@@ -74,6 +79,9 @@ function get_compile_modules_for_stage() {
         (${STAGE_TIDB})
             echo "-pl $MODULES_TIDB -am"
         ;;
+        (${STAGE_OCEANBASE})
+            echo "-pl $MODULES_OCEANBASE -am"
+        ;;
         (${STAGE_E2E})
             # compile everything; using the -am switch does not work with negated module lists!
             # the negation takes precedence, thus not all required modules would be built
@@ -96,6 +104,7 @@ function get_test_modules_for_stage() {
     local modules_mongodb=$MODULES_MONGODB
     local modules_sqlserver=$MODULES_SQLSERVER
     local modules_tidb=$MODULES_TIDB
+    local modules_oceanbase=$MODULES_OCEANBASE
     local modules_e2e=$MODULES_E2E
     local negated_mysql=\!${MODULES_MYSQL//,/,\!}
     local negated_postgres=\!${MODULES_POSTGRES//,/,\!}
@@ -103,8 +112,9 @@ function get_test_modules_for_stage() {
     local negated_mongodb=\!${MODULES_MONGODB//,/,\!}
     local negated_sqlserver=\!${MODULES_SQLSERVER//,/,\!}
     local negated_tidb=\!${MODULES_TIDB//,/,\!}
+    local negated_oceanbase=\!${MODULES_OCEANBASE//,/,\!}
     local negated_e2e=\!${MODULES_E2E//,/,\!}
-    local modules_misc="$negated_mysql,$negated_postgres,$negated_oracle,$negated_mongodb,$negated_sqlserver,$negated_tidb,$negated_e2e"
+    local modules_misc="$negated_mysql,$negated_postgres,$negated_oracle,$negated_mongodb,$negated_sqlserver,$negated_tidb,$negated_oceanbase,$negated_e2e"
 
     case ${stage} in
         (${STAGE_MYSQL})
@@ -125,6 +135,9 @@ function get_test_modules_for_stage() {
         (${STAGE_TIDB})
             echo "-pl $modules_tidb"
         ;;
+        (${STAGE_OCEANBASE})
+            echo "-pl $modules_oceanbase"
+        ;;
         (${STAGE_E2E})
             echo "-pl $modules_e2e"
         ;;
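Note on the negation idiom above: "negated_oceanbase=\!${MODULES_OCEANBASE//,/,\!}" prefixes the module list with "!" and rewrites every "," as ",!", so the misc stage can hand Maven a list of excluded projects covering every connector that has its own stage. A minimal standalone sketch of the expansion follows; the final mvn line is illustrative only (the real invocation is assembled by tools/ci/test_controller.sh, which this patch does not touch):

#!/usr/bin/env bash
# Sketch: the negation expansion used in tools/ci/stage.sh.
MODULES_OCEANBASE="\
flink-connector-oceanbase-cdc,\
flink-sql-connector-oceanbase-cdc"

# '\!' yields a literal '!'; '${var//,/,\!}' replaces every ',' with ',!'.
negated_oceanbase=\!${MODULES_OCEANBASE//,/,\!}

echo "$negated_oceanbase"
# prints: !flink-connector-oceanbase-cdc,!flink-sql-connector-oceanbase-cdc

# Maven (>= 3.2.1) excludes '!'-prefixed projects passed to -pl, e.g.:
# mvn test -pl "$negated_oceanbase"   # tests everything except the two oceanbase modules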