
[build][oceanbase] Use self-hosted agent for oceanbase cdc tests
whhe authored and leonardBang committed Mar 23, 2022
1 parent 28b1b07 commit 7137467
Showing 3 changed files with 142 additions and 1 deletion.
7 changes: 7 additions & 0 deletions azure-pipelines.yml
@@ -47,3 +47,10 @@ stages:
vmImage: 'ubuntu-20.04'
run_end_to_end: false
jdk: 8
- template: tools/azure-pipelines/jobs-template-for-self-hosted-agent.yml
parameters: # see template file for a definition of the parameters.
stage_name: ci_build_on_self_hosted_agent
test_pool_definition:
name: Flink_CDC_CI
run_end_to_end: false
jdk: 8
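
For context: Microsoft-hosted agents are requested with a vmImage, while a self-hosted pool such as Flink_CDC_CI is requested by name. A minimal sketch of the two pool forms (illustrative only, not part of this commit):

# Microsoft-hosted pool: Azure provisions a fresh VM per job.
pool:
  vmImage: 'ubuntu-20.04'

# Self-hosted pool: jobs are scheduled onto machines registered
# in the named pool (here, the Flink_CDC_CI pool used above).
pool:
  name: Flink_CDC_CI

Because self-hosted machines are reused across jobs, the template below cleans the workspace before each job and conditions its Maven cache task on the pool name.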
121 changes: 121 additions & 0 deletions tools/azure-pipelines/jobs-template-for-self-hosted-agent.yml
@@ -0,0 +1,121 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

parameters:
test_pool_definition: # defines the hardware pool for compilation and unit test execution.
stage_name: # defines a unique identifier for all jobs in a stage (in case the jobs are added multiple times to a stage)
run_end_to_end: # if set to 'true', the end-to-end tests will be executed
jdk: # the jdk version to use

jobs:
- job: compile_${{parameters.stage_name}}
# succeeded() is needed to allow job cancellation
condition: and(succeeded(), not(eq(variables['MODE'], 'e2e')))
pool: ${{parameters.test_pool_definition}}
timeoutInMinutes: 240
cancelTimeoutInMinutes: 1
workspace:
clean: all # this cleans the entire workspace directory before running a new job
# It is necessary because the custom build machines are reused for tests.
# See also https://docs.microsoft.com/en-us/azure/devops/pipelines/process/phases?view=azure-devops&tabs=yaml#workspace

steps:
# The cache task persists the .m2 directory between builds, so that
# we do not have to re-download all dependencies from maven central for
# each build. The hope is that downloading the cache is faster than
# downloading all dependencies individually.
# In this configuration, we use a hash over all committed (not generated) .pom files
# as a key for the build cache (CACHE_KEY). If we have a cache miss on the hash
# (usually because a pom file has changed), we'll fall back to a key without
# the pom files (CACHE_FALLBACK_KEY).
# Official documentation of the Cache task: https://docs.microsoft.com/en-us/azure/devops/pipelines/caching/?view=azure-devops
- task: Cache@2
inputs:
key: $(CACHE_KEY)
restoreKeys: $(CACHE_FALLBACK_KEY)
path: $(MAVEN_CACHE_FOLDER)
continueOnError: true # continue the build even if the cache fails.
displayName: Cache Maven local repo
- script: |
echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_${{parameters.jdk}}_X64"
echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_${{parameters.jdk}}_X64/bin:$PATH"
displayName: "Set JDK"
# Compile
- script: |
./tools/ci/compile.sh || exit $?
./tools/azure-pipelines/create_build_artifact.sh
displayName: Compile
# upload artifacts for next stage
- task: PublishPipelineArtifact@1
inputs:
targetPath: $(FLINK_ARTIFACT_DIR)
artifact: FlinkCompileArtifact-${{parameters.stage_name}}

- job: test_${{parameters.stage_name}}
dependsOn: compile_${{parameters.stage_name}}
condition: and(succeeded(), not(eq(variables['MODE'], 'e2e')))
pool: ${{parameters.test_pool_definition}}
timeoutInMinutes: 240
cancelTimeoutInMinutes: 1
workspace:
clean: all
strategy:
matrix:
oceanbase:
module: oceanbase
steps:
# download artifact from compile stage
- task: DownloadPipelineArtifact@2
inputs:
path: $(FLINK_ARTIFACT_DIR)
artifact: FlinkCompileArtifact-${{parameters.stage_name}}

- script: ./tools/azure-pipelines/unpack_build_artifact.sh
displayName: "Unpack Build artifact"

- task: Cache@2
inputs:
key: $(CACHE_KEY)
restoreKeys: $(CACHE_FALLBACK_KEY)
path: $(MAVEN_CACHE_FOLDER)
continueOnError: true # continue the build even if the cache fails.
condition: not(eq('${{parameters.test_pool_definition.name}}', 'Default'))
displayName: Cache Maven local repo

- script: |
echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_${{parameters.jdk}}_X64"
echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_${{parameters.jdk}}_X64/bin:$PATH"
displayName: "Set JDK"
- script: sudo sysctl -w kernel.core_pattern=core.%p
displayName: Set coredump pattern
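# kernel.core_pattern=core.%p makes the kernel write any crash dump as
# core.<pid> in the crashing process's working directory, so that later
# debug steps can collect it.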

# Test
- script: ./tools/azure-pipelines/uploading_watchdog.sh ./tools/ci/test_controller.sh $(module)
displayName: Test - $(module)

- task: PublishTestResults@2
condition: succeededOrFailed()
inputs:
testResultsFormat: 'JUnit'

# upload debug artifacts
- task: PublishPipelineArtifact@1
condition: not(eq('$(DEBUG_FILES_OUTPUT_DIR)', ''))
displayName: Upload Logs
inputs:
targetPath: $(DEBUG_FILES_OUTPUT_DIR)
artifact: logs-${{parameters.stage_name}}-$(DEBUG_FILES_NAME)
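
A note on the matrix: each entry under strategy.matrix becomes its own job, with the listed variables (here module) set for that job; test_controller.sh receives the value via $(module). A hedged sketch of how a second connector could be run through the same template (the tidb entry is hypothetical, not part of this commit):

strategy:
  matrix:
    oceanbase:
      module: oceanbase
    tidb:           # hypothetical second entry; spawns a separate job
      module: tidb  # with $(module) set to 'tidb'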
15 changes: 14 additions & 1 deletion tools/ci/stage.sh
@@ -22,6 +22,7 @@ STAGE_ORACLE="oracle"
STAGE_MONGODB="mongodb"
STAGE_SQLSERVER="sqlserver"
STAGE_TIDB="tidb"
STAGE_OCEANBASE="oceanbase"
STAGE_E2E="e2e"
STAGE_MISC="misc"

@@ -49,6 +50,10 @@ MODULES_TIDB="\
flink-connector-tidb-cdc,\
flink-sql-connector-tidb-cdc"

MODULES_OCEANBASE="\
flink-connector-oceanbase-cdc,\
flink-sql-connector-oceanbase-cdc"

MODULES_E2E="\
flink-cdc-e2e-tests"

@@ -74,6 +79,9 @@ function get_compile_modules_for_stage() {
(${STAGE_TIDB})
echo "-pl $MODULES_TIDB -am"
;;
(${STAGE_OCEANBASE})
echo "-pl $MODULES_OCEANBASE -am"
;;
(${STAGE_E2E})
# compile everything; using the -am switch does not work with negated module lists!
# the negation takes precedence, thus not all required modules would be built
@@ -96,15 +104,17 @@ function get_test_modules_for_stage() {
local modules_mongodb=$MODULES_MONGODB
local modules_sqlserver=$MODULES_SQLSERVER
local modules_tidb=$MODULES_TIDB
local modules_oceanbase=$MODULES_OCEANBASE
local modules_e2e=$MODULES_E2E
local negated_mysql=\!${MODULES_MYSQL//,/,\!}
local negated_postgres=\!${MODULES_POSTGRES//,/,\!}
local negated_oracle=\!${MODULES_ORACLE//,/,\!}
local negated_mongodb=\!${MODULES_MONGODB//,/,\!}
local negated_sqlserver=\!${MODULES_SQLSERVER//,/,\!}
local negated_tidb=\!${MODULES_TIDB//,/,\!}
local negated_oceanbase=\!${MODULES_OCEANBASE//,/,\!}
local negated_e2e=\!${MODULES_E2E//,/,\!}
- local modules_misc="$negated_mysql,$negated_postgres,$negated_oracle,$negated_mongodb,$negated_sqlserver,$negated_tidb,$negated_e2e"
+ local modules_misc="$negated_mysql,$negated_postgres,$negated_oracle,$negated_mongodb,$negated_sqlserver,$negated_tidb,$negated_oceanbase,$negated_e2e"

case ${stage} in
(${STAGE_MYSQL})
@@ -125,6 +135,9 @@ function get_test_modules_for_stage() {
(${STAGE_TIDB})
echo "-pl $modules_tidb"
;;
(${STAGE_OCEANBASE})
echo "-pl $modules_oceanbase"
;;
(${STAGE_E2E})
echo "-pl $modules_e2e"
;;
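
The misc stage in stage.sh works by exclusion: each negated_* variable rewrites a module list into a Maven exclusion list, while the compile stage's -am (also-make) flag pulls in each module's dependencies. A standalone bash sketch of the negation substitution used above:

#!/usr/bin/env bash
# ${VAR//,/,\!} replaces every comma with ",!"; the leading \! negates
# the first entry, turning "a,b" into "!a,!b".
MODULES_OCEANBASE="flink-connector-oceanbase-cdc,flink-sql-connector-oceanbase-cdc"
negated_oceanbase=\!${MODULES_OCEANBASE//,/,\!}
echo "$negated_oceanbase"
# prints: !flink-connector-oceanbase-cdc,!flink-sql-connector-oceanbase-cdc
# Maven then skips those modules: mvn test -pl "$negated_oceanbase"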