chore: update paths to composite actions #34
Workflow file for this run
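# Reusable workflow (workflow_call): builds the add-on package, runs static
# checks and unit tests, then drives functional test suites (knowledge, UI,
# modinput, requirement, scripted inputs, ESCU) on Argo-managed infrastructure.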
name: build-test-release
on:
  workflow_call:
    inputs:
      marker:
        required: false
        description: 'Parallel run marker'
        type: string
        default: >-
          [""]
    secrets:
      GH_TOKEN_ADMIN:
        description: GitHub admin token
        required: true
      SEMGREP_PUBLISH_TOKEN:
        description: Semgrep token
        required: true
      AWS_ACCESS_KEY_ID:
        description: AWS access key ID
        required: true
      AWS_DEFAULT_REGION:
        description: AWS default region
        required: true
      AWS_SECRET_ACCESS_KEY:
        description: AWS secret access key
        required: true
      VT_API_KEY:
        description: VirusTotal API key
        required: true
      OTHER_TA_REQUIRED_CONFIGS:
        description: Other required configs
        required: true
      FOSSA_API_KEY:
        description: API token for the FOSSA app
        required: true
      SA_GH_USER_NAME:
        description: GPG signature username
        required: true
      SA_GH_USER_EMAIL:
        description: GPG signature user email
        required: true
      SA_GPG_PRIVATE_KEY:
        description: GPG signature private key
        required: true
      SA_GPG_PASSPHRASE:
        description: GPG signature passphrase
        required: true
      SPL_COM_USER:
        description: Username for splunk.com
        required: true
      SPL_COM_PASSWORD:
        description: Password for splunk.com
        required: true
permissions:
  contents: read
  packages: read
concurrency:
  group: ${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
jobs:
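  # Decides whether the run can be skipped, which test types to execute, and
  # whether to delay infrastructure teardown when a PR carries the
  # preserve_infra label with a "preserve: <test types>" line in its body.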
  setup-workflow:
    runs-on: ubuntu-latest
    outputs:
      skip-workflow: ${{ steps.skip-workflow.outputs.skip-workflow }}
      delay-destroy-ko: ${{ steps.delay-destroy-setup.outputs.delay-destroy-ko }}
      delay-destroy-ui: ${{ steps.delay-destroy-setup.outputs.delay-destroy-ui }}
      delay-destroy-modinput_functional: ${{ steps.delay-destroy-setup.outputs.delay-destroy-modinput_functional }}
      delay-destroy-escu: ${{ steps.delay-destroy-setup.outputs.delay-destroy-escu }}
      delay-destroy-scripted_inputs: ${{ steps.delay-destroy-setup.outputs.delay-destroy-scripted_inputs }}
      delay-destroy-requirement_test: ${{ steps.delay-destroy-setup.outputs.delay-destroy-requirement_test }}
      execute-ko: ${{ steps.delay-destroy-setup.outputs.execute-ko }}
      execute-ui: ${{ steps.delay-destroy-setup.outputs.execute-ui }}
      execute-escu: ${{ steps.delay-destroy-setup.outputs.execute-escu }}
      execute-modinput_functional: ${{ steps.delay-destroy-setup.outputs.execute-modinput_functional }}
      execute-scripted_inputs: ${{ steps.delay-destroy-setup.outputs.execute-scripted_inputs }}
      execute-requirement_test: ${{ steps.delay-destroy-setup.outputs.execute-requirement_test }}
      execute-knowledge-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_knowledge_labeled }}
      execute-ui-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_ui_labeled }}
      execute-escu-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_escu_labeled }}
      execute-modinput-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_modinput_functional_labeled }}
      execute-scripted_inputs-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_scripted_inputs_labeled }}
      execute-requirement-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_requirement_test_labeled }}
    steps:
      - name: skip workflow if description is empty for labeled pr
        id: skip-workflow
        env:
          PR_BODY: ${{ github.event.pull_request.body }}
        run: |
          set +e
          TESTSET="knowledge ui modinput_functional scripted_inputs escu requirement_test"
          echo "testset=$TESTSET" >> "$GITHUB_OUTPUT"
          SKIP_WORKFLOW="No"
          if [[ '${{ github.event.action }}' == 'labeled' && '${{ github.event.label.name }}' == 'preserve_infra' ]]; then
            echo "$PR_BODY" >> body.txt
            SKIP_WORKFLOW="Yes"
            tests=$(grep -i "^preserve:" body.txt | { grep -v grep || true; })
            for test_type in $TESTSET; do
              if [[ $tests =~ $test_type ]]; then
                SKIP_WORKFLOW="No"
              fi
            done
          fi
          echo "skip-workflow=$SKIP_WORKFLOW" >> "$GITHUB_OUTPUT"
          if [ "$SKIP_WORKFLOW" == "Yes" ]; then
            echo "No description provided with the preserve_infra label"
          fi
      - name: setup for delay destroy
        id: delay-destroy-setup
        shell: bash
        env:
          PR_BODY: ${{ github.event.pull_request.body }}
        run: |
          set +e
          TESTSET="${{ steps.skip-workflow.outputs.testset }}"
          for test_type in $TESTSET; do
            eval DELAY_DESTROY_$test_type="No"
            eval EXECUTE_$test_type="No"
          done
          if [[ '${{ github.event.label.name }}' == 'preserve_infra' ]]; then
            echo "$PR_BODY" >> body.txt
            tests=$(grep -i "^preserve:" body.txt | { grep -v grep || true; })
            if [[ $tests =~ "escu" ]]; then
              echo "preserve_infra for escu test-type is not supported yet"
            fi
            for test_type in $TESTSET; do
              if [[ $tests =~ $test_type ]]; then
                eval EXECUTE_$test_type="Yes"
                eval DELAY_DESTROY_$test_type="Yes"
              fi
            done
          fi
          # PRESERVE_INFRA for escu test-type is not supported yet.
          DELAY_DESTROY_escu="No"
          {
            echo "delay-destroy-ko=$DELAY_DESTROY_knowledge"
            echo "delay-destroy-ui=$DELAY_DESTROY_ui"
            echo "delay-destroy-modinput_functional=$DELAY_DESTROY_modinput_functional"
            echo "delay-destroy-scripted_inputs=$DELAY_DESTROY_scripted_inputs"
            echo "delay-destroy-escu=$DELAY_DESTROY_escu"
            echo "delay-destroy-requirement_test=$DELAY_DESTROY_requirement_test"
            echo "execute-ko=$EXECUTE_knowledge"
            echo "execute-ui=$EXECUTE_ui"
            echo "execute-modinput_functional=$EXECUTE_modinput_functional"
            echo "execute-scripted_inputs=$EXECUTE_scripted_inputs"
            echo "execute-escu=$EXECUTE_escu"
            echo "execute-requirement_test=$EXECUTE_requirement_test"
          } >> "$GITHUB_OUTPUT"
      - name: configure tests based on labels
        id: configure-tests-on-labels
        run: |
          set +e
          declare -A EXECUTE_LABELED
          TESTSET=("execute_knowledge" "execute_ui" "execute_modinput_functional" "execute_scripted_inputs" "execute_escu" "execute_requirement_test")
          for test_type in "${TESTSET[@]}"; do
            EXECUTE_LABELED["$test_type"]="false"
          done
          case "${{ github.event_name }}" in
            "pull_request")
              if ${{ github.base_ref == 'main' }} || ${{ contains(github.event.pull_request.labels.*.name, 'execute_all_tests') }}; then
                for test_type in "${TESTSET[@]}"; do
                  EXECUTE_LABELED["$test_type"]="true"
                done
              else
                labels=$(echo '${{ toJSON(github.event.pull_request.labels) }}' | jq -r '.[] | .name')
                for test_type in "${TESTSET[@]}"; do
                  if [[ "$labels" =~ $test_type ]]; then
                    EXECUTE_LABELED["$test_type"]="true"
                  fi
                done
              fi
              ;;
            "push")
              if ${{ github.ref_name == 'main' }} || ${{ github.ref_name == 'develop' }} || ${{ github.ref_type == 'tag' }}; then
                for test_type in "${TESTSET[@]}"; do
                  EXECUTE_LABELED["$test_type"]="true"
                done
              fi
              ;;
            "schedule")
              for test_type in "${TESTSET[@]}"; do
                EXECUTE_LABELED["$test_type"]="true"
              done
              ;;
            *)
              echo "No tests were labeled for execution!"
              ;;
          esac
          echo "Tests to execute based on labels:"
          for test_type in "${TESTSET[@]}"; do
            echo "$test_type""_labeled=${EXECUTE_LABELED["$test_type"]}" >> "$GITHUB_OUTPUT"
            echo "$test_type""_labeled: ${EXECUTE_LABELED["$test_type"]}"
          done
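  # Checks that the PR title follows the expected convention (pull requests only).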
  validate-pr-title:
    name: Validate PR title
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' && github.event_name == 'pull_request' }}
    permissions:
      contents: read
      packages: read
      pull-requests: read
      statuses: write
    steps:
      - name: Run title validation
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/validate-pr-title
        with:
          GITHUB_TOKEN: ${{ github.token }}
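  # Computes shared metadata: container tags/labels plus the Splunk, SC4S, and
  # vendor test matrices consumed by the build and test jobs below.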
  meta:
    name: Prepare metadata
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    outputs:
      sc4s: ${{ steps.meta.outputs.sc4s }}
      container_tags: ${{ steps.meta.outputs.container_tags }}
      container_labels: ${{ steps.meta.outputs.container_labels }}
      container_buildtime: ${{ steps.meta.outputs.container_buildtime }}
      container_version: ${{ steps.meta.outputs.container_version }}
      container_revision: ${{ steps.meta.outputs.container_revision }}
      container_base: ${{ steps.meta.outputs.container_base }}
      matrix_supportedSplunk: ${{ steps.meta.outputs.matrix_supportedSplunk }}
      matrix_latestSplunk: ${{ steps.meta.outputs.matrix_latestSplunk }}
      matrix_supportedSC4S: ${{ steps.meta.outputs.matrix_supportedSC4S }}
      matrix_supportedModinputFunctionalVendors: ${{ steps.meta.outputs.matrix_supportedModinputFunctionalVendors }}
      matrix_supportedUIVendors: ${{ steps.meta.outputs.matrix_supportedUIVendors }}
      python39_splunk: ${{ steps.meta.outputs.python39_splunk }}
      python39_sc4s: ${{ steps.meta.outputs.python39_sc4s }}
    steps:
      - name: Run meta preparation
        id: meta
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/meta
        with:
          SA_GH_USER_NAME: ${{ secrets.SA_GH_USER_NAME }}
          SA_GH_USER_EMAIL: ${{ secrets.SA_GH_USER_EMAIL }}
          SA_GPG_PRIVATE_KEY: ${{ secrets.SA_GPG_PRIVATE_KEY }}
          SA_GPG_PASSPHRASE: ${{ secrets.SA_GPG_PASSPHRASE }}
  fossa-scan:
    name: FOSSA scan
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    steps:
      - name: Run FOSSA scan
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/fossa-scan
        with:
          FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }}
  fossa-test:
    continue-on-error: true
    name: FOSSA test
    runs-on: ubuntu-latest
    needs:
      - fossa-scan
    steps:
      - name: Run FOSSA test
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/fossa-test
        with:
          FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }}
  compliance-copyrights:
    name: Compliance copyrights
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    steps:
      - name: Run compliance copyrights
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/compliance-copyrights
  lint:
    name: Lint
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    steps:
      - name: Run linting checks
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/lint
  review-secrets:
    name: Review secrets
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    steps:
      - name: Run secrets review
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/review-secrets
  semgrep:
    name: Semgrep security check
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    steps:
      - name: Run semgrep
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/semgrep
        with:
          SEMGREP_PUBLISH_TOKEN: ${{ secrets.SEMGREP_PUBLISH_TOKEN }}
  test-inventory:
    name: Test inventory
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    outputs:
      unit: ${{ steps.test-inventory.outputs.unit }}
      ucc_modinput_functional: ${{ steps.test-inventory.outputs.ucc_modinput_functional }}
      modinput_functional: ${{ steps.test-inventory.outputs.modinput_functional }}
      requirement_test: ${{ steps.test-inventory.outputs.requirement_test }}
      knowledge: ${{ steps.test-inventory.outputs.knowledge }}
      ui: ${{ steps.test-inventory.outputs.ui }}
      scripted_inputs: ${{ steps.test-inventory.outputs.scripted_inputs }}
      escu: ${{ steps.test-inventory.outputs.escu }}
    steps:
      - name: Run test inventory check
        id: test-inventory
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/test-inventory
  # Two separate unit-test jobs are needed because jobs that depend on unit-test
  # success can't proceed if any matrix job fails. Currently python 3.9 may fail
  # as it's not supported in all TAs.
  # TODO: group these jobs into the matrix once python 3.9 is supported
  run-unit-tests-3_7:
    name: Unit tests python 3.7
    if: ${{ needs.test-inventory.outputs.unit == 'true' }}
    runs-on: ubuntu-latest
    needs:
      - test-inventory
    permissions:
      actions: read
      deployments: read
      contents: read
      packages: read
      statuses: read
      checks: write
    steps:
      - name: Run unit tests for python 3.7
        id: unit-tests-3_7
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/unit-tests
        with:
          python_version: '3.7'
          GH_TOKEN_ADMIN: ${{ secrets.GH_TOKEN_ADMIN }}
  run-unit-tests-3_9:
    name: Unit tests python 3.9
    if: ${{ needs.test-inventory.outputs.unit == 'true' }}
    runs-on: ubuntu-latest
    continue-on-error: true
    needs:
      - test-inventory
    permissions:
      actions: read
      deployments: read
      contents: read
      packages: read
      statuses: read
      checks: write
    steps:
      - name: Run unit tests for python 3.9
        id: unit-tests-3_9
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/unit-tests
        with:
          python_version: '3.9'
          GH_TOKEN_ADMIN: ${{ secrets.GH_TOKEN_ADMIN }}
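  # Packages the add-on for each python runtime; the 3.7 build's buildname
  # output determines the S3 upload path used by the test jobs.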
  build:
    name: Build python 3.7
    runs-on: ubuntu-latest
    needs:
      - test-inventory
      - meta
      - compliance-copyrights
      - lint
      - review-secrets
      - semgrep
      - run-unit-tests-3_7
    if: ${{ !cancelled() && (needs.run-unit-tests-3_7.result == 'success' || needs.run-unit-tests-3_7.result == 'skipped') }}
    outputs:
      buildname: ${{ steps.build.outputs.buildname }}
    permissions:
      contents: write
      packages: read
    steps:
      - name: Run build 3.7
        id: build
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/build
        with:
          python_version: "3.7"
          SA_GH_USER_NAME: ${{ secrets.SA_GH_USER_NAME }}
          SA_GH_USER_EMAIL: ${{ secrets.SA_GH_USER_EMAIL }}
          SA_GPG_PRIVATE_KEY: ${{ secrets.SA_GPG_PRIVATE_KEY }}
          SA_GPG_PASSPHRASE: ${{ secrets.SA_GPG_PASSPHRASE }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          ucc_modinput_functional: ${{ needs.test-inventory.outputs.ucc_modinput_functional }}
          modinput_functional: ${{ needs.test-inventory.outputs.modinput_functional }}
  build-3_9:
    name: Build python 3.9
    runs-on: ubuntu-latest
    needs:
      - test-inventory
      - meta
      - compliance-copyrights
      - lint
      - review-secrets
      - semgrep
      - run-unit-tests-3_7
    if: ${{ !cancelled() && (needs.run-unit-tests-3_7.result == 'success' || needs.run-unit-tests-3_7.result == 'skipped') }}
    permissions:
      contents: write
      packages: read
    steps:
      - name: Run build 3.9
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/build
        with:
          python_version: "3.9"
          SA_GH_USER_NAME: ${{ secrets.SA_GH_USER_NAME }}
          SA_GH_USER_EMAIL: ${{ secrets.SA_GH_USER_EMAIL }}
          SA_GPG_PRIVATE_KEY: ${{ secrets.SA_GPG_PRIVATE_KEY }}
          SA_GPG_PASSPHRASE: ${{ secrets.SA_GPG_PASSPHRASE }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          ucc_modinput_functional: ${{ needs.test-inventory.outputs.ucc_modinput_functional }}
          modinput_functional: ${{ needs.test-inventory.outputs.modinput_functional }}
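  # Scans the built package with VirusTotal; marked continue-on-error so a
  # scan outage does not block the run.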
  virustotal:
    continue-on-error: true
    runs-on: ubuntu-latest
    needs: build
    if: ${{ !cancelled() && needs.build.result == 'success' }}
    steps:
      - name: Run VirusTotal check
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/virustotal
        with:
          VT_API_KEY: ${{ secrets.VT_API_KEY }}
  run-requirements-unit-tests:
    name: Requirements unit tests
    runs-on: ubuntu-latest
    needs:
      - build
      - test-inventory
    if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.requirement_test == 'true' }}
    permissions:
      actions: read
      deployments: read
      contents: read
      packages: read
      statuses: read
      checks: write
    steps:
      - name: Run requirements unit tests
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/requirements-unit-tests
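  # AppInspect: the CLI variant runs once per tag; the API variant runs only
  # for main and needs splunk.com credentials.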
  appinspect-cli:
    name: AppInspect CLI ${{ matrix.tags }}
    runs-on: ubuntu-latest
    needs: build
    if: ${{ !cancelled() && needs.build.result == 'success' }}
    strategy:
      fail-fast: false
      matrix:
        tags:
          - "cloud"
          - "appapproval"
          - "deprecated_feature"
          - "developer_guidance"
          - "future"
          - "self-service"
          - "splunk_appinspect"
          - "manual"
    steps:
      - name: Run appinspect CLI
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/appinspect-cli
        with:
          matrix_tags: ${{ matrix.tags }}
  appinspect-api:
    name: AppInspect API ${{ matrix.tags }}
    runs-on: ubuntu-latest
    needs: build
    if: |
      !cancelled() &&
      needs.build.result == 'success' &&
      ( github.base_ref == 'main' || github.ref_name == 'main' )
    strategy:
      fail-fast: false
      matrix:
        tags:
          - "cloud"
    steps:
      - name: Run appinspect API
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/appinspect-api
        with:
          matrix_tags: ${{ matrix.tags }}
          SPL_COM_USER: ${{ secrets.SPL_COM_USER }}
          SPL_COM_PASSWORD: ${{ secrets.SPL_COM_PASSWORD }}
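  # Publishes the scanned artifacts to the package registry (needs packages: write).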
  artifact-registry:
    name: Artifact registry
    runs-on: ubuntu-latest
    needs:
      - virustotal
      - meta
    if: ${{ !cancelled() && needs.virustotal.result == 'success' && needs.meta.result == 'success' }}
    permissions:
      contents: read
      packages: write
    steps:
      - name: Run artifact registry
        uses: splunk/addonfactory-workflow-addon-release/.github/actions/artifact-registry
        with:
          sc4s: ${{ needs.meta.outputs.sc4s }}
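  # Derives the add-on name, per-run job-name template, and S3 paths, and (for
  # ucc modinput tests) generates and uploads the swagger client from the
  # OpenAPI spec produced by the build.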
  setup:
    needs:
      - build
      - test-inventory
    if: ${{ !cancelled() && needs.build.result == 'success' }}
    runs-on: ubuntu-latest
    outputs:
      argo-server: ${{ steps.test-setup.outputs.argo-server }}
      argo-http1: ${{ steps.test-setup.outputs.argo-http1 }}
      argo-secure: ${{ steps.test-setup.outputs.argo-secure }}
      spl-host-suffix: ${{ steps.test-setup.outputs.spl-host-suffix }}
      argo-href: ""
      argo-base-href: ${{ steps.test-setup.outputs.argo-base-href }}
      argo-workflow-tmpl-name: ${{ steps.test-setup.outputs.argo-workflow-tmpl-name }}
      argo-cancel-workflow-tmpl-name: ${{ steps.test-setup.outputs.argo-cancel-workflow-tmpl-name }}
      k8s-manifests-branch: ${{ steps.test-setup.outputs.k8s-manifests-branch }}
      argo-namespace: ${{ steps.test-setup.outputs.argo-namespace }}
      addon-name: ${{ steps.test-setup.outputs.addon-name }}
      job-name: ${{ steps.test-setup.outputs.job-name }}
      labels: ${{ steps.test-setup.outputs.labels }}
      addon-upload-path: ${{ steps.test-setup.outputs.addon-upload-path }}
      directory-path: ${{ steps.test-setup.outputs.directory-path }}
      s3-bucket: ${{ steps.test-setup.outputs.s3-bucket }}
    env:
      BUILD_NAME: ${{ needs.build.outputs.buildname }}
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
          token: ${{ secrets.GH_TOKEN_ADMIN }}
      - name: setup for test
        id: test-setup
        shell: bash
        run: |
          sudo apt-get install -y crudini
          ADDON_NAME=$(crudini --get package/default/app.conf id name | tr '[:lower:]' '[:upper:]')
          if [[ -n $(echo "${ADDON_NAME}" | awk -F 'SPLUNK_TA_' '{print $2}') ]];
          then
            ADDON_NAME=$(echo "${ADDON_NAME}" | awk -F 'SPLUNK_TA_' '{print $2}')
          elif [[ -n $(echo "${ADDON_NAME}" | awk -F '_FOR_SPLUNK' '{print $1}') ]];
          then
            ADDON_NAME=$(echo "${ADDON_NAME}" | awk -F '_FOR_SPLUNK' '{print $1}')
          fi
          JOB_NAME=$(echo "$ADDON_NAME" | tail -c 16)-$(echo "${GITHUB_SHA}" | tail -c 8)-TEST-TYPE-${GITHUB_RUN_ID}
          JOB_NAME=${JOB_NAME//[_.]/-}
          LABELS="addon-name=${ADDON_NAME}"
          ADDON_UPLOAD_PATH="s3://ta-production-artifacts/ta-apps/${{ needs.build.outputs.buildname }}"
          {
            echo "argo-server=argo.wfe.splgdi.com:443"
            echo "argo-http1=true"
            echo "argo-secure=true"
            echo "argo-base-href=''"
            echo "argo-namespace=workflows"
            echo "argo-workflow-tmpl-name=ta-workflow"
            echo "argo-cancel-workflow-tmpl-name=cancel-workflow"
            echo "directory-path=/tmp"
            echo "s3-bucket=ta-production-artifacts"
            echo "addon-name=\"$ADDON_NAME\""
            echo "job-name=wf-$JOB_NAME"
            echo "labels=$LABELS"
            echo "addon-upload-path=$ADDON_UPLOAD_PATH"
            echo "spl-host-suffix=wfe.splgdi.com"
            echo "k8s-manifests-branch=main"
          } >> "$GITHUB_OUTPUT"
      - uses: actions/download-artifact@v4
        if: ${{ needs.test-inventory.outputs.ucc_modinput_functional == 'true' && needs.test-inventory.outputs.modinput_functional == 'true' }}
        id: download-openapi
        with:
          name: artifact-openapi
          path: ${{ github.workspace }}
      - name: Setup python
        if: steps.download-openapi.conclusion != 'skipped'
        uses: actions/setup-python@v5
        with:
          python-version: 3.7
      - name: modinput-test-prerequisites
        if: steps.download-openapi.conclusion != 'skipped'
        shell: bash
        env:
          PYTHON_KEYRING_BACKEND: keyring.backends.null.Keyring
        run: |
          sudo pip3 install poetry==1.5.1
          export POETRY_REPOSITORIES_SPLUNK_ADD_ON_UCC_MODINPUT_TEST_URL=https://github.com/splunk/addonfactory-ucc-test.git
          export POETRY_HTTP_BASIC_SPLUNK_ADD_ON_UCC_MODINPUT_TEST_USERNAME=${{ secrets.SA_GH_USER_NAME }}
          export POETRY_HTTP_BASIC_SPLUNK_ADD_ON_UCC_MODINPUT_TEST_PASSWORD=${{ secrets.GH_TOKEN_ADMIN }}
          poetry install --only modinput
          poetry run ucc-test-modinput -o ${{ steps.download-openapi.outputs.download-path }}/openapi.json -t ${{ steps.download-openapi.outputs.download-path }}/tmp/
      - name: upload-swagger-artifacts-to-s3
        if: steps.download-openapi.conclusion != 'skipped'
        id: swaggerupload
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
        run: |
          swagger_name=swagger_$(basename "$BUILD_NAME" .spl)
          aws s3 sync "${{ steps.download-openapi.outputs.download-path }}/tmp/restapi_client/" "s3://ta-production-artifacts/ta-apps/$swagger_name/" --exclude "*" --include "README.md" --include "*swagger_client*" --only-show-errors
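  # The test jobs below share one pattern: submit an Argo workflow that deploys
  # Splunk (plus SC4S or vendor containers where relevant), watch it, retry
  # once if the pod was deleted, then pull artifacts, logs, reports, and diags
  # from S3.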
  run-knowledge-tests:
    if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.knowledge == 'true' && (needs.setup-workflow.outputs.execute-ko == 'Yes' || needs.setup-workflow.outputs.execute-knowledge-labeled == 'true') }}
    needs:
      - build
      - test-inventory
      - setup
      - meta
      - setup-workflow
    runs-on: ubuntu-latest
    continue-on-error: ${{ matrix.python39 }}
    strategy:
      fail-fast: false
      matrix:
        splunk: ${{ fromJson(needs.meta.outputs.matrix_supportedSplunk) }}
        sc4s: ${{ fromJson(needs.meta.outputs.matrix_supportedSC4S) }}
        python39: [false]
        include:
          - splunk: ${{ fromJson(needs.meta.outputs.python39_splunk) }}
            sc4s: ${{ fromJson(needs.meta.outputs.python39_sc4s) }}
            python39: true
    container:
      image: ghcr.io/splunk/workflow-engine-base:2.0.3
    env:
      ARGO_SERVER: ${{ needs.setup.outputs.argo-server }}
      ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }}
      ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }}
      ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }}
      ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }}
      SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }}
      TEST_TYPE: "knowledge"
      TEST_ARGS: ""
    permissions:
      actions: read
      deployments: read
      contents: read
      packages: read
      statuses: read
      checks: write
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: capture start time
        id: capture-start-time
        run: |
          echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT"
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v2
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
      - name: Read secrets from AWS Secrets Manager into environment variables
        id: get-argo-token
        run: |
          ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString')
          echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT"
      - name: create job name
        id: create-job-name
        shell: bash
        run: |
          RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4)
          JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING}
          JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}}
          JOB_NAME=${JOB_NAME//[_.]/-}
          JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]')
          echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT"
      - name: Splunk instance details
        id: splunk-instance-details
        if: ${{ needs.setup-workflow.outputs.delay-destroy-ko == 'Yes' }}
        shell: bash
        run: |
          BOLD="\033[1m"
          NORMAL="\033[0m"
          echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts"
          echo -e "Splunk username is${BOLD} admin${NORMAL}"
          echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD"
      - name: run-tests
        id: run-tests
        timeout-minutes: 340
        continue-on-error: true
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        uses: splunk/[email protected]
        with:
          splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }}
          test-type: ${{ env.TEST_TYPE }}
          test-args: ""
          job-name: ${{ steps.create-job-name.outputs.job-name }}
          labels: ${{ needs.setup.outputs.labels }}
          workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }}
          workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }}
          delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-ko }}
          addon-url: ${{ needs.setup.outputs.addon-upload-path }}
          addon-name: ${{ needs.setup.outputs.addon-name }}
          sc4s-version: ${{ matrix.sc4s.version }}
          sc4s-docker-registry: ${{ matrix.sc4s.docker_registry }}
          k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }}
      - name: Read secrets from AWS Secrets Manager again into environment variables in case of credential rotation
        id: update-argo-token
        if: ${{ !cancelled() }}
        run: |
          ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString')
          echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT"
      - name: calculate timeout
        id: calculate-timeout
        run: |
          start_time=${{ steps.capture-start-time.outputs.start_time }}
          current_time=$(date +%s)
          remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
          echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
      - name: Check if pod was deleted
        id: is-pod-deleted
        timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }}
        if: ${{ !cancelled() }}
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
        run: |
          set -o xtrace
          if argo watch ${{ steps.run-tests.outputs.workflow-name }} -n workflows | grep "pod deleted"; then
            echo "retry-workflow=true" >> "$GITHUB_OUTPUT"
          fi
      - name: Cancel workflow
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        if: ${{ cancelled() || steps.is-pod-deleted.outcome != 'success' }}
        run: |
          cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }})
          cancel_workflow_name=$( echo "$cancel_response" | jq -r '.metadata.name' )
          cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows)
          if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then
            echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"
          else
            echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop"
            exit 1
          fi
      - name: Retrying workflow
        id: retry-wf
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
        if: ${{ !cancelled() }}
        run: |
          set -o xtrace
          set +e
          if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]]
          then
            WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name)
            echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT"
            argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..."
          else
            echo "No retry required"
            argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows
            argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon"
          fi
      - name: check if workflow completed
        env:
          ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
        shell: bash
        if: ${{ !cancelled() }}
        run: |
          set +e
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
          ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          echo "Status of workflow:" "$ARGO_STATUS"
          while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ]
          do
            echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete."
            argo wait "${WORKFLOW_NAME}" -n workflows || true
            ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          done
      - name: pull artifacts from s3 bucket
        if: ${{ !cancelled() }}
        run: |
          echo "pulling artifacts"
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/
          tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }}
      - name: pull logs from s3 bucket
        if: ${{ !cancelled() }}
        run: |
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
          echo "pulling logs"
          mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive
      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests artifacts
          path: |
            ${{ needs.setup.outputs.directory-path }}/test-results
      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests logs
          path: |
            ${{ needs.setup.outputs.directory-path }}/argo-logs
      - name: Upload cim-compliance-report for ${{ matrix.splunk.version }}
        uses: actions/upload-artifact@v4
        if: ${{ matrix.splunk.islatest == true }}
        with:
          name: cim-compliance-report
          path: |
            ${{ needs.setup.outputs.directory-path }}/test-results/cim-compliance-report.md
      - name: Upload cim-field-report for ${{ matrix.splunk.version }}
        uses: actions/upload-artifact@v4
        if: ${{ matrix.splunk.islatest == true }}
        with:
          name: cim-field-report
          path: |
            ${{ needs.setup.outputs.directory-path }}/test-results/cim_field_report.json
      - name: Test Report
        id: test_report
        uses: dorny/test-reporter@v1
        if: ${{ !cancelled() }}
        with:
          name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} test report
          path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml"
          reporter: java-junit
      - name: pull diag from s3 bucket
        if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }}
        run: |
          echo "pulling diag"
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/
      - uses: actions/upload-artifact@v4
        if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }}
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests diag
          path: |
            ${{ needs.setup.outputs.directory-path }}/diag*
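  # Requirement tests run only against the latest Splunk matrix entry.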
  run-requirement-tests:
    if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.requirement_test == 'true' && (needs.setup-workflow.outputs.execute-requirement_test == 'Yes' || needs.setup-workflow.outputs.execute-requirement-labeled == 'true') }}
    needs:
      - build
      - test-inventory
      - setup
      - meta
      - setup-workflow
    runs-on: ubuntu-latest
    continue-on-error: ${{ matrix.python39 }}
    strategy:
      fail-fast: false
      matrix:
        splunk: ${{ fromJson(needs.meta.outputs.matrix_latestSplunk) }}
        sc4s: ${{ fromJson(needs.meta.outputs.matrix_supportedSC4S) }}
        python39: [false]
        include:
          - splunk: ${{ fromJson(needs.meta.outputs.python39_splunk) }}
            sc4s: ${{ fromJson(needs.meta.outputs.python39_sc4s) }}
            python39: true
    container:
      image: ghcr.io/splunk/workflow-engine-base:2.0.3
    env:
      ARGO_SERVER: ${{ needs.setup.outputs.argo-server }}
      ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }}
      ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }}
      ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }}
      ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }}
      TEST_TYPE: "requirement_test"
      TEST_ARGS: ""
    permissions:
      actions: read
      deployments: read
      contents: read
      packages: read
      statuses: read
      checks: write
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: capture start time
        id: capture-start-time
        run: |
          echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT"
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v2
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
      - name: Read secrets from AWS Secrets Manager into environment variables
        id: get-argo-token
        run: |
          ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString')
          echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT"
      - name: create job name
        id: create-job-name
        shell: bash
        run: |
          RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4)
          JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING}
          JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}}
          JOB_NAME=${JOB_NAME//[_.]/-}
          JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]')
          echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT"
      - name: Splunk instance details
        id: splunk-instance-details
        if: ${{ needs.setup-workflow.outputs.delay-destroy-requirement_test == 'Yes' }}
        shell: bash
        run: |
          BOLD="\033[1m"
          NORMAL="\033[0m"
          echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts"
          echo -e "Splunk username is${BOLD} admin${NORMAL}"
          echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD"
      - name: run-tests
        id: run-tests
        timeout-minutes: 340
        continue-on-error: true
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        uses: splunk/[email protected]
        with:
          splunk: ${{ matrix.splunk.version }}
          test-type: ${{ env.TEST_TYPE }}
          test-args: ""
          job-name: ${{ steps.create-job-name.outputs.job-name }}
          labels: ${{ needs.setup.outputs.labels }}
          workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }}
          workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }}
          delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-requirement_test }}
          addon-url: ${{ needs.setup.outputs.addon-upload-path }}
          addon-name: ${{ needs.setup.outputs.addon-name }}
          sc4s-version: ${{ matrix.sc4s.version }}
          sc4s-docker-registry: ${{ matrix.sc4s.docker_registry }}
          k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }}
      - name: calculate timeout
        id: calculate-timeout
        run: |
          start_time=${{ steps.capture-start-time.outputs.start_time }}
          current_time=$(date +%s)
          remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
          echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
      - name: Check if pod was deleted
        id: is-pod-deleted
        timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }}
        if: ${{ !cancelled() }}
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        run: |
          set -o xtrace
          if argo watch ${{ steps.run-tests.outputs.workflow-name }} -n workflows | grep "pod deleted"; then
            echo "retry-workflow=true" >> "$GITHUB_OUTPUT"
          fi
      - name: Cancel workflow
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        if: ${{ cancelled() || steps.is-pod-deleted.outcome != 'success' }}
        run: |
          cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }})
          cancel_workflow_name=$( echo "$cancel_response" | jq -r '.metadata.name' )
          cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows)
          if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then
            echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"
          else
            echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop"
            exit 1
          fi
      - name: Retrying workflow
        id: retry-wf
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        if: ${{ !cancelled() }}
        run: |
          set -o xtrace
          set +e
          if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]]
          then
            WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name)
            echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT"
            argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..."
          else
            echo "No retry required"
            argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows
            argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon"
          fi
      - name: check if workflow completed
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        shell: bash
        if: ${{ !cancelled() }}
        run: |
          set +e
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
          ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          echo "Status of workflow:" "$ARGO_STATUS"
          while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ]
          do
            echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete."
            argo wait "${WORKFLOW_NAME}" -n workflows || true
            ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          done
      - name: pull artifacts from s3 bucket
        if: ${{ !cancelled() }}
        run: |
          echo "pulling artifacts"
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/
          tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }}
      - name: pull logs from s3 bucket
        if: ${{ !cancelled() }}
        run: |
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
          echo "pulling logs"
          mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive
      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: archive splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} tests artifacts
          path: |
            ${{ needs.setup.outputs.directory-path }}/test-results
      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: archive splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} tests logs
          path: |
            ${{ needs.setup.outputs.directory-path }}/argo-logs
      - name: Test Report
        id: test_report
        uses: dorny/test-reporter@v1
        if: ${{ !cancelled() }}
        with:
          name: splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} test report
          path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml"
          reporter: java-junit
      - name: pull diag from s3 bucket
        if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }}
        run: |
          echo "pulling diag"
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/
      - uses: actions/upload-artifact@v4
        if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }}
        with:
          name: archive splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} tests diag
          path: |
            ${{ needs.setup.outputs.directory-path }}/diag*
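  # UI tests fan out across browser (chrome/firefox) and vendor-version in
  # addition to the supported Splunk matrix.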
  run-ui-tests:
    if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.ui == 'true' && (needs.setup-workflow.outputs.execute-ui == 'Yes' || needs.setup-workflow.outputs.execute-ui-labeled == 'true') }}
    needs:
      - build
      - test-inventory
      - setup
      - meta
      - setup-workflow
    runs-on: ubuntu-latest
    continue-on-error: ${{ matrix.python39 }}
    strategy:
      fail-fast: false
      matrix:
        splunk: ${{ fromJson(needs.meta.outputs.matrix_supportedSplunk) }}
        browser: [ "chrome", "firefox" ]
        vendor-version: ${{ fromJson(needs.meta.outputs.matrix_supportedUIVendors) }}
        python39: [false]
        include:
          - splunk: ${{ fromJson(needs.meta.outputs.python39_splunk) }}
            browser: "chrome"
            python39: true
    container:
      image: ghcr.io/splunk/workflow-engine-base:2.0.3
    env:
      ARGO_SERVER: ${{ needs.setup.outputs.argo-server }}
      ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }}
      ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }}
      ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }}
      ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }}
      SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }}
      TEST_TYPE: "ui"
      TEST_ARGS: "--browser ${{ matrix.browser }}"
    permissions:
      actions: read
      deployments: read
      contents: read
      packages: read
      statuses: read
      checks: write
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: capture start time
        id: capture-start-time
        run: |
          echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT"
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v2
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
      - name: Read secrets from AWS Secrets Manager into environment variables
        id: get-argo-token
        run: |
          ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString')
          echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT"
      - name: create job name
        id: create-job-name
        shell: bash
        run: |
          RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4)
          JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING}
          JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}-${{ matrix.browser }}}
          JOB_NAME=${JOB_NAME//[_.:]/-}
          JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]')
          echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT"
      - name: Splunk instance details
        id: splunk-instance-details
        if: ${{ needs.setup-workflow.outputs.delay-destroy-ui == 'Yes' }}
        shell: bash
        run: |
          BOLD="\033[1m"
          NORMAL="\033[0m"
          echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts"
          echo -e "Splunk username is${BOLD} admin${NORMAL}"
          echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD"
      - name: run-tests
        id: run-tests
        timeout-minutes: 340
        continue-on-error: true
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        uses: splunk/[email protected]
        with:
          splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }}
          test-type: ${{ env.TEST_TYPE }}
          test-args: ${{ env.TEST_ARGS }}
          job-name: ${{ steps.create-job-name.outputs.job-name }}
          labels: ${{ needs.setup.outputs.labels }}
          workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }}
          workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }}
          delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-ui }}
          addon-url: ${{ needs.setup.outputs.addon-upload-path }}
          addon-name: ${{ needs.setup.outputs.addon-name }}
          vendor-version: ${{ matrix.vendor-version.image }}
          sc4s-version: "No"
          k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }}
      - name: Read secrets from AWS Secrets Manager again into environment variables in case of credential rotation
        id: update-argo-token
        if: ${{ !cancelled() }}
        run: |
          ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString')
          echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT"
      - name: calculate timeout
        id: calculate-timeout
        run: |
          start_time=${{ steps.capture-start-time.outputs.start_time }}
          current_time=$(date +%s)
          remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
          echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
      - name: Check if pod was deleted
        id: is-pod-deleted
        timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }}
        if: ${{ !cancelled() }}
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
        run: |
          set -o xtrace
          if argo watch ${{ steps.run-tests.outputs.workflow-name }} -n workflows | grep "pod deleted"; then
            echo "retry-workflow=true" >> "$GITHUB_OUTPUT"
          fi
      - name: Cancel workflow
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        if: ${{ cancelled() || steps.is-pod-deleted.outcome != 'success' }}
        run: |
          cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }})
          cancel_workflow_name=$( echo "$cancel_response" | jq -r '.metadata.name' )
          cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows)
          if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then
            echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"
          else
            echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop"
            exit 1
          fi
      - name: Retrying workflow
        id: retry-wf
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
        if: ${{ !cancelled() }}
        run: |
          set -o xtrace
          set +e
          if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]]
          then
            WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name)
            echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT"
            argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..."
          else
            echo "No retry required"
            argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows
            argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon"
          fi
      - name: check if workflow completed
        env:
          ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
        if: ${{ !cancelled() }}
        shell: bash
        run: |
          set +e
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
          ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          echo "Status of workflow:" "$ARGO_STATUS"
          while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ]
          do
            echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete."
            argo wait "${WORKFLOW_NAME}" -n workflows || true
            ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          done
      - name: pull artifacts from s3 bucket
        if: ${{ !cancelled() }}
        run: |
          echo "pulling artifacts"
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/
          tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }}
      - name: pull logs from s3 bucket
        if: ${{ !cancelled() }}
        run: |
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
          echo "pulling logs"
          mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive
      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} tests artifacts
          path: |
            ${{ needs.setup.outputs.directory-path }}/test-results
      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} tests logs
          path: |
            ${{ needs.setup.outputs.directory-path }}/argo-logs
      - name: Test Report
        id: test_report
        uses: dorny/test-reporter@v1
        if: ${{ !cancelled() }}
        with:
          name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} test report
          path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml"
          reporter: java-junit
      - name: pull diag from s3 bucket
        if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }}
        run: |
          echo "pulling diag"
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/
      - uses: actions/upload-artifact@v4
        if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }}
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} tests diag
          path: |
            ${{ needs.setup.outputs.directory-path }}/diag*
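  # Modinput functional tests can be sharded through the workflow's marker
  # input; each non-empty marker value is passed to the suite as a "-m"
  # expression by the create-test-arg step.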
run-modinput-tests: | |
if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.modinput_functional == 'true' && (needs.setup-workflow.outputs.execute-modinput_functional == 'Yes' || needs.setup-workflow.outputs.execute-modinput-labeled == 'true') }} | |
needs: | |
- build | |
- test-inventory | |
- setup | |
- meta | |
- setup-workflow | |
runs-on: ubuntu-latest | |
continue-on-error: ${{ matrix.python39 }} | |
strategy: | |
fail-fast: false | |
matrix: | |
splunk: ${{ fromJson(needs.meta.outputs.matrix_supportedSplunk) }} | |
modinput-type: [ "modinput_functional" ] | |
vendor-version: ${{ fromJson(needs.meta.outputs.matrix_supportedModinputFunctionalVendors) }} | |
marker: ${{ fromJson(inputs.marker) }} | |
python39: [false] | |
include: | |
- splunk: ${{ fromJson(needs.meta.outputs.python39_splunk) }} | |
modinput-type: [ "modinput_functional" ] | |
python39: true | |
container: | |
image: ghcr.io/splunk/workflow-engine-base:2.0.3 | |
env: | |
ARGO_SERVER: ${{ needs.setup.outputs.argo-server }} | |
ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }} | |
ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }} | |
ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }} | |
ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }} | |
SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | |
TEST_TYPE: "modinput_functional" | |
TEST_ARGS: "" | |
permissions: | |
actions: read | |
deployments: read | |
contents: read | |
packages: read | |
statuses: read | |
checks: write | |
steps: | |
- uses: actions/checkout@v4 | |
with: | |
submodules: recursive | |
- name: capture start time | |
id: capture-start-time | |
run: | | |
echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT" | |
- name: Configure AWS credentials | |
uses: aws-actions/configure-aws-credentials@v2 | |
with: | |
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} | |
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | |
aws-region: ${{ secrets.AWS_DEFAULT_REGION }} | |
- name: Read secrets from AWS Secrets Manager into environment variables | |
id: get-argo-token | |
run: | | |
ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') | |
echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" | |
- name: create job name | |
id: create-job-name | |
shell: bash | |
run: | | |
RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4) | |
JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING} | |
JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}} | |
JOB_NAME=${JOB_NAME//[_.]/-} | |
JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]') | |
echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT" | |
- name: Splunk instance details | |
id: splunk-instance-details | |
if: ${{ needs.setup-workflow.outputs.delay-destroy-modinput_functional == 'Yes' }} | |
shell: bash | |
run: | | |
BOLD="\033[1m" | |
NORMAL="\033[0m" | |
echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts" | |
echo -e "Splunk username is${BOLD} admin${NORMAL}" | |
echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD" | |
- name: create test argument | |
id: create-test-arg | |
shell: bash | |
run: | | |
export comparing_variable="[]" | |
if [ "${{ inputs.marker }}" == "$comparing_variable" ] | |
then | |
TEST_ARG_M="" | |
else | |
TEST_ARG_M="-m" | |
fi | |
echo "test-arg=$TEST_ARG_M" >> "$GITHUB_OUTPUT" | |
- name: run-tests | |
id: run-tests | |
timeout-minutes: 340 | |
continue-on-error: true | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
uses: splunk/[email protected] | |
with: | |
splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | |
test-type: ${{ env.TEST_TYPE }} | |
test-args: ${{ env.TEST_ARGS }} ${{ steps.create-test-arg.outputs.test-arg }} ${{ matrix.marker }} | |
job-name: ${{ steps.create-job-name.outputs.job-name }} | |
labels: ${{ needs.setup.outputs.labels }} | |
workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }} | |
workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }} | |
delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-modinput_functional }} | |
addon-url: ${{ needs.setup.outputs.addon-upload-path }} | |
addon-name: ${{ needs.setup.outputs.addon-name }} | |
vendor-version: ${{ matrix.vendor-version.image }} | |
sc4s-version: "No" | |
k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} | |
- name: Read secrets from AWS Secrets Manager again into environment variables in case of credential rotation | |
id: update-argo-token | |
if: ${{ !cancelled() }} | |
run: | | |
ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') | |
echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" | |
- name: calculate timeout | |
id: calculate-timeout | |
run: | | |
start_time=${{ steps.capture-start-time.outputs.start_time }} | |
current_time=$(date +%s) | |
remaining_time_minutes=$(( 350-((current_time-start_time)/60) )) | |
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT" | |
- name: Check if pod was deleted | |
id: is-pod-deleted | |
timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }} | |
if: ${{ !cancelled() }} | |
shell: bash | |
env: | |
ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} | |
run: | | |
set -o xtrace | |
if argo watch ${{ steps.run-tests.outputs.workflow-name }} -n workflows | grep "pod deleted"; then | |
echo "retry-workflow=true" >> "$GITHUB_OUTPUT" | |
fi | |
- name: Cancel workflow | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
if: ${{ cancelled() || steps.is-pod-deleted.outcome != 'success' }} | |
run: | | |
cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) | |
cancel_workflow_name=$(echo "$cancel_response" | jq -r '.metadata.name') | |
cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) | |
if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then | |
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" | |
else | |
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" | |
exit 1 | |
fi | |
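# Either resubmit the workflow (when the pod-deleted flag was set) and follow | |
# the new run's logs, or simply wait on the original workflow and surface its | |
# test-addon status lines. | |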
- name: Retrying workflow | |
id: retry-wf | |
shell: bash | |
env: | |
ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} | |
if: ${{ !cancelled() }} | |
run: | | |
set -o xtrace | |
set +e | |
if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]] | |
then | |
WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name) | |
echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT" | |
argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..." | |
else | |
echo "No retry required" | |
argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | |
argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon" | |
fi | |
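# argo wait can return before the workflow actually finishes (hence the | |
# "|| true"), so poll the phase via argo get until it leaves Running/Pending. | |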
- name: check if workflow completed | |
env: | |
ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} | |
if: ${{ !cancelled() }} | |
shell: bash | |
run: | | |
set +e | |
# shellcheck disable=SC2157 | |
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | |
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | |
else | |
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | |
fi | |
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | |
echo "Status of workflow:" "$ARGO_STATUS" | |
while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ] | |
do | |
echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete." | |
argo wait "${WORKFLOW_NAME}" -n workflows || true | |
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | |
done | |
- name: pull artifacts from s3 bucket | |
if: ${{ !cancelled() }} | |
run: | | |
echo "pulling artifacts" | |
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | |
tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} | |
- name: pull logs from s3 bucket | |
if: ${{ !cancelled() }} | |
run: | | |
# shellcheck disable=SC2157 | |
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | |
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | |
else | |
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | |
fi | |
echo "pulling logs" | |
mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs | |
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive | |
- uses: actions/upload-artifact@v4 | |
if: ${{ !cancelled() }} | |
with: | |
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} tests artifacts | |
path: | | |
${{ needs.setup.outputs.directory-path }}/test-results | |
- uses: actions/upload-artifact@v4 | |
if: ${{ !cancelled() }} | |
with: | |
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} tests logs | |
path: | | |
${{ needs.setup.outputs.directory-path }}/argo-logs | |
- name: Test Report | |
id: test_report | |
uses: dorny/test-reporter@v1 | |
if: ${{ !cancelled() }} | |
with: | |
name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} test report | |
path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" | |
reporter: java-junit | |
- name: pull diag from s3 bucket | |
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | |
run: | | |
echo "pulling diag" | |
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | |
- uses: actions/upload-artifact@v4 | |
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | |
with: | |
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} tests diag | |
path: | | |
${{ needs.setup.outputs.directory-path }}/diag* | |
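# Scripted input tests come in two flavours: this full-matrix job runs for | |
# main across the whole OS list, while the canary job further down runs for | |
# develop against a reduced OS set. | |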
run-scripted-input-tests-full-matrix: | |
if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.scripted_inputs == 'true' && ( github.base_ref == 'main' || github.ref_name == 'main' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-scripted_inputs-labeled == 'true') }} | |
needs: | |
- build | |
- test-inventory | |
- setup | |
- meta | |
- setup-workflow | |
runs-on: ubuntu-latest | |
continue-on-error: ${{ matrix.python39 }} | |
strategy: | |
fail-fast: false | |
matrix: | |
splunk: ${{ fromJson(needs.meta.outputs.matrix_supportedSplunk) }} | |
os: [ "ubuntu:14.04", "ubuntu:16.04","ubuntu:18.04","ubuntu:22.04", "centos:7", "redhat:8.0", "redhat:8.2", "redhat:8.3", "redhat:8.4", "redhat:8.5" ] | |
python39: [false] | |
include: | |
- splunk: ${{ fromJson(needs.meta.outputs.python39_splunk) }} | |
os: "ubuntu:22.04" | |
python39: true | |
container: | |
image: ghcr.io/splunk/workflow-engine-base:2.0.3 | |
env: | |
ARGO_SERVER: ${{ needs.setup.outputs.argo-server }} | |
ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }} | |
ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }} | |
ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }} | |
ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }} | |
SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | |
TEST_TYPE: "scripted_inputs" | |
permissions: | |
actions: read | |
deployments: read | |
contents: read | |
packages: read | |
statuses: read | |
checks: write | |
steps: | |
- uses: actions/checkout@v4 | |
with: | |
submodules: recursive | |
- name: capture start time | |
id: capture-start-time | |
run: | | |
echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT" | |
- name: Configure AWS credentials | |
uses: aws-actions/configure-aws-credentials@v2 | |
with: | |
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} | |
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | |
aws-region: ${{ secrets.AWS_DEFAULT_REGION }} | |
- name: Read secrets from AWS Secrets Manager into environment variables | |
id: get-argo-token | |
run: | | |
ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') | |
echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" | |
- name: create job name | |
id: create-job-name | |
shell: bash | |
run: | | |
RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4) | |
JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING} | |
JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}} | |
JOB_NAME=${JOB_NAME//[_.]/-} | |
JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]') | |
echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT" | |
- name: Splunk instance details | |
id: splunk-instance-details | |
if: ${{ needs.setup-workflow.outputs.delay-destroy-scripted_inputs == 'Yes' }} | |
shell: bash | |
run: | | |
BOLD="\033[1m" | |
NORMAL="\033[0m" | |
echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts" | |
echo -e "Splunk username is${BOLD} admin${NORMAL}" | |
echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD" | |
- name: get os name and version | |
id: os-name-version | |
shell: bash | |
run: | | |
OS_NAME_VERSION=${{ matrix.os }} | |
# shellcheck disable=SC2206 | |
OS_NAME_VERSION=(${OS_NAME_VERSION//:/ }) | |
OS_NAME=${OS_NAME_VERSION[0]} | |
OS_VERSION=${OS_NAME_VERSION[1]} | |
{ | |
echo "os-name=$OS_NAME" | |
echo "os-version=$OS_VERSION" | |
} >> "$GITHUB_OUTPUT" | |
- name: run-tests | |
id: run-tests | |
timeout-minutes: 340 | |
continue-on-error: true | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
uses: splunk/[email protected] | |
with: | |
splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | |
test-type: ${{ env.TEST_TYPE }} | |
test-args: "--hostname=spl --os-name=${{ steps.os-name-version.outputs.os-name }} --os-version=${{ steps.os-name-version.outputs.os-version }} -m script_input" | |
job-name: ${{ steps.create-job-name.outputs.job-name }} | |
labels: ${{ needs.setup.outputs.labels }} | |
workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }} | |
workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }} | |
delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-scripted_inputs }} | |
addon-url: ${{ needs.setup.outputs.addon-upload-path }} | |
addon-name: ${{ needs.setup.outputs.addon-name }} | |
vendor-version: ${{ matrix.vendor-version.image }} | |
sc4s-version: "No" | |
os-name: ${{ steps.os-name-version.outputs.os-name }} | |
os-version: ${{ steps.os-name-version.outputs.os-version }} | |
k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} | |
- name: calculate timeout | |
id: calculate-timeout | |
run: | | |
start_time=${{ steps.capture-start-time.outputs.start_time }} | |
current_time=$(date +%s) | |
remaining_time_minutes=$(( 350-((current_time-start_time)/60) )) | |
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT" | |
- name: Check if pod was deleted | |
id: is-pod-deleted | |
timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }} | |
if: ${{ !cancelled() }} | |
shell: bash | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
run: | | |
set -o xtrace | |
if argo watch ${{ steps.run-tests.outputs.workflow-name }} -n workflows | grep "pod deleted"; then | |
echo "retry-workflow=true" >> "$GITHUB_OUTPUT" | |
fi | |
- name: Cancel workflow | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
if: ${{ cancelled() || steps.is-pod-deleted.outcome != 'success' }} | |
run: | | |
cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) | |
cancel_workflow_name=$(echo "$cancel_response" | jq -r '.metadata.name') | |
cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) | |
if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then | |
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" | |
else | |
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" | |
exit 1 | |
fi | |
- name: Retrying workflow | |
id: retry-wf | |
shell: bash | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
if: ${{ !cancelled() }} | |
run: | | |
set -o xtrace | |
set +e | |
if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]] | |
then | |
WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name) | |
echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT" | |
argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..." | |
else | |
echo "No retry required" | |
argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | |
argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon" | |
fi | |
- name: check if workflow completed | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
if: ${{ !cancelled() }} | |
shell: bash | |
run: | | |
set +e | |
# shellcheck disable=SC2157 | |
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | |
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | |
else | |
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | |
fi | |
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | |
echo "Status of workflow:" "$ARGO_STATUS" | |
while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ] | |
do | |
echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete." | |
argo wait "${WORKFLOW_NAME}" -n workflows || true | |
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | |
done | |
- name: pull artifacts from s3 bucket | |
if: ${{ !cancelled() }} | |
run: | | |
echo "pulling artifacts" | |
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | |
tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} | |
- name: pull logs from s3 bucket | |
if: ${{ !cancelled() }} | |
run: | | |
# shellcheck disable=SC2157 | |
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | |
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | |
else | |
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | |
fi | |
echo "pulling logs" | |
mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs | |
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive | |
- uses: actions/upload-artifact@v4 | |
if: ${{ !cancelled() }} | |
with: | |
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests artifacts | |
path: | | |
${{ needs.setup.outputs.directory-path }}/test-results | |
- uses: actions/upload-artifact@v4 | |
if: ${{ !cancelled() }} | |
with: | |
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests logs | |
path: | | |
${{ needs.setup.outputs.directory-path }}/argo-logs | |
- name: Test Report | |
id: test_report | |
uses: dorny/test-reporter@v1 | |
if: ${{ !cancelled() }} | |
with: | |
name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} test report | |
path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" | |
reporter: java-junit | |
- name: pull diag from s3 bucket | |
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | |
run: | | |
echo "pulling diag" | |
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | |
- uses: actions/upload-artifact@v4 | |
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | |
with: | |
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests diag | |
path: | | |
${{ needs.setup.outputs.directory-path }}/diag* | |
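# Canary variant of the scripted input tests: runs for develop with a trimmed | |
# OS matrix and separate test-report handling for the unreleased Python 3.9 | |
# Splunk build. | |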
run-scripted-input-tests-canary: | |
if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.scripted_inputs == 'true' && ( github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-scripted_inputs-labeled == 'true') }} | |
needs: | |
- build | |
- test-inventory | |
- setup | |
- meta | |
- setup-workflow | |
runs-on: ubuntu-latest | |
strategy: | |
fail-fast: false | |
matrix: | |
splunk: ${{ fromJson(needs.meta.outputs.matrix_supportedSplunk) }} | |
os: [ "ubuntu:22.04", "centos:7","redhat:8.5" ] | |
container: | |
image: ghcr.io/splunk/workflow-engine-base:2.0.3 | |
env: | |
ARGO_SERVER: ${{ needs.setup.outputs.argo-server }} | |
ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }} | |
ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }} | |
ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }} | |
ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }} | |
SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | |
TEST_TYPE: "scripted_inputs" | |
permissions: | |
actions: read | |
deployments: read | |
contents: read | |
packages: read | |
statuses: read | |
checks: write | |
steps: | |
- uses: actions/checkout@v4 | |
with: | |
submodules: recursive | |
- name: capture start time | |
id: capture-start-time | |
run: | | |
echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT" | |
- name: Configure AWS credentials | |
uses: aws-actions/configure-aws-credentials@v2 | |
with: | |
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} | |
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | |
aws-region: ${{ secrets.AWS_DEFAULT_REGION }} | |
- name: Read secrets from AWS Secrets Manager into environment variables | |
id: get-argo-token | |
run: | | |
ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') | |
echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" | |
- name: create job name | |
id: create-job-name | |
shell: bash | |
run: | | |
RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4) | |
JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING} | |
JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}} | |
JOB_NAME=${JOB_NAME//[_.]/-} | |
JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]') | |
echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT" | |
- name: Splunk instance details | |
id: splunk-instance-details | |
if: ${{ needs.setup-workflow.outputs.delay-destroy-scripted_inputs == 'Yes' }} | |
shell: bash | |
run: | | |
BOLD="\033[1m" | |
NORMAL="\033[0m" | |
echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts" | |
echo -e "Splunk username is${BOLD} admin${NORMAL}" | |
echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD" | |
- name: get os name and version | |
id: os-name-version | |
shell: bash | |
run: | | |
OS_NAME_VERSION=${{ matrix.os }} | |
OS_NAME_VERSION=("${OS_NAME_VERSION//:/ }") | |
OS_NAME=${OS_NAME_VERSION[0]} | |
OS_VERSION=${OS_NAME_VERSION[1]} | |
{ | |
echo "os-name=$OS_NAME" | |
echo "os-version=$OS_VERSION" | |
} >> "$GITHUB_OUTPUT" | |
- name: run-tests | |
id: run-tests | |
timeout-minutes: 340 | |
continue-on-error: true | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
uses: splunk/[email protected] | |
with: | |
splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | |
test-type: ${{ env.TEST_TYPE }} | |
test-args: "--hostname=spl --os-name=${{ steps.os-name-version.outputs.os-name }} --os-version=${{ steps.os-name-version.outputs.os-version }} -m script_input" | |
job-name: ${{ steps.create-job-name.outputs.job-name }} | |
labels: ${{ needs.setup.outputs.labels }} | |
workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }} | |
workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }} | |
delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-scripted_inputs }} | |
addon-url: ${{ needs.setup.outputs.addon-upload-path }} | |
addon-name: ${{ needs.setup.outputs.addon-name }} | |
vendor-version: ${{ matrix.vendor-version.image }} | |
sc4s-version: "No" | |
os-name: ${{ steps.os-name-version.outputs.os-name }} | |
os-version: ${{ steps.os-name-version.outputs.os-version }} | |
k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} | |
- name: calculate timeout | |
id: calculate-timeout | |
run: | | |
start_time=${{ steps.capture-start-time.outputs.start_time }} | |
current_time=$(date +%s) | |
remaining_time_minutes=$(( 350-((current_time-start_time)/60) )) | |
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT" | |
- name: Check if pod was deleted | |
id: is-pod-deleted | |
timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }} | |
if: ${{ !cancelled() }} | |
shell: bash | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
run: | | |
set -o xtrace | |
if argo watch ${{ steps.run-tests.outputs.workflow-name }} -n workflows | grep "pod deleted"; then | |
echo "retry-workflow=true" >> "$GITHUB_OUTPUT" | |
fi | |
- name: Cancel workflow | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
if: ${{ cancelled() || steps.is-pod-deleted.outcome != 'success' }} | |
run: | | |
cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) | |
cancel_workflow_name=$(echo "$cancel_response" | jq -r '.metadata.name') | |
cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) | |
if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then | |
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" | |
else | |
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" | |
exit 1 | |
fi | |
- name: Retrying workflow | |
id: retry-wf | |
shell: bash | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
if: ${{ !cancelled() }} | |
run: | | |
set -o xtrace | |
set +e | |
if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]] | |
then | |
WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name) | |
echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT" | |
argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..." | |
else | |
echo "No retry required" | |
argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | |
argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon" | |
fi | |
- name: check if workflow completed | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
if: ${{ !cancelled() }} | |
shell: bash | |
run: | | |
set +e | |
# shellcheck disable=SC2157 | |
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | |
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | |
else | |
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | |
fi | |
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | |
echo "Status of workflow:" "$ARGO_STATUS" | |
while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ] | |
do | |
echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete." | |
argo wait "${WORKFLOW_NAME}" -n workflows || true | |
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | |
done | |
- name: pull artifacts from s3 bucket | |
if: ${{ !cancelled() }} | |
run: | | |
echo "pulling artifacts" | |
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | |
tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} | |
- name: pull logs from s3 bucket | |
if: ${{ !cancelled() }} | |
run: | | |
# shellcheck disable=SC2157 | |
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | |
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | |
else | |
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | |
fi | |
echo "pulling logs" | |
mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs | |
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive | |
- uses: actions/upload-artifact@v4 | |
if: ${{ !cancelled() }} | |
with: | |
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests artifacts | |
path: | | |
${{ needs.setup.outputs.directory-path }}/test-results | |
- uses: actions/upload-artifact@v4 | |
if: ${{ !cancelled() }} | |
with: | |
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests logs | |
path: | | |
${{ needs.setup.outputs.directory-path }}/argo-logs | |
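# Two report steps cover the same result files: the regular one runs for | |
# released Splunk versions and gates the job, while the Python 3.9 one is | |
# continue-on-error so the unreleased build cannot fail it. | |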
- name: Test Report | |
id: test_report | |
uses: dorny/test-reporter@v1 | |
if: ${{ !cancelled() && !contains(matrix.splunk.version, 'unreleased-python3_9') }} | |
with: | |
name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} test report | |
path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" | |
reporter: java-junit | |
- name: Test Report Python 3.9 | |
continue-on-error: true | |
id: test_report_python_3_9 | |
uses: dorny/test-reporter@v1 | |
if: ${{ !cancelled() && contains(matrix.splunk.version, 'unreleased-python3_9') }} | |
with: | |
name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} test report | |
path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" | |
reporter: java-junit | |
- name: pull diag from s3 bucket | |
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | |
run: | | |
echo "pulling diag" | |
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | |
- uses: actions/upload-artifact@v4 | |
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | |
with: | |
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests diag | |
path: | | |
${{ needs.setup.outputs.directory-path }}/diag* | |
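# ESCU tests run only against the latest Splunk version, only for main and | |
# develop, and only when the detection list file is non-empty. | |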
run-escu-tests: | |
if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.escu == 'true' && ( github.base_ref == 'main' || github.ref_name == 'main' || github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-escu == 'Yes' || needs.setup-workflow.outputs.execute-escu-labeled == 'true') }} | |
needs: | |
- build | |
- test-inventory | |
- setup | |
- meta | |
- setup-workflow | |
runs-on: ubuntu-latest | |
strategy: | |
fail-fast: false | |
matrix: | |
splunk: ${{ fromJson(needs.meta.outputs.matrix_latestSplunk) }} | |
container: | |
image: ghcr.io/splunk/workflow-engine-base:2.0.3 | |
env: | |
ARGO_SERVER: ${{ needs.setup.outputs.argo-server }} | |
ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }} | |
ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }} | |
ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }} | |
ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }} | |
SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | |
TEST_TYPE: "escu" | |
permissions: | |
actions: read | |
deployments: read | |
contents: read | |
packages: read | |
statuses: read | |
checks: write | |
steps: | |
- uses: actions/checkout@v4 | |
with: | |
submodules: recursive | |
- name: capture start time | |
id: capture-start-time | |
run: | | |
echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT" | |
- name: Configure AWS credentials | |
uses: aws-actions/configure-aws-credentials@v2 | |
with: | |
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} | |
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | |
aws-region: ${{ secrets.AWS_DEFAULT_REGION }} | |
- name: Read secrets from AWS Secrets Manager into environment variables | |
id: get-argo-token | |
run: | | |
ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString') | |
echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" | |
- name: create job name | |
id: create-job-name | |
shell: bash | |
run: | | |
RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4) | |
JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING} | |
JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}} | |
JOB_NAME=${JOB_NAME//[_.]/-} | |
JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]') | |
echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT" | |
- name: Splunk instance details | |
id: splunk-instance-details | |
if: ${{ needs.setup-workflow.outputs.delay-destroy-escu == 'Yes' }} | |
shell: bash | |
run: | | |
BOLD="\033[1m" | |
NORMAL="\033[0m" | |
echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts" | |
echo -e "Splunk username is${BOLD} admin${NORMAL}" | |
echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD" | |
- name: get escu detections | |
id: get-escu-detections | |
run: | | |
RUN_TEST=false | |
# shellcheck disable=SC2002 | |
DETECTIONS=$(cat tests/escu/.escu_detections | tr '\n' ',' | tr -d "[:space:]") | |
if [ -z "$DETECTIONS" ] | |
then | |
echo "Detection list is empty." | |
else | |
RUN_TEST=true | |
fi | |
DETECTIONS="-tf $DETECTIONS" | |
{ | |
echo "escu-detections=$DETECTIONS" | |
echo "escu-test-run=$RUN_TEST" | |
} >> "$GITHUB_OUTPUT" | |
- name: run-tests | |
id: run-tests | |
timeout-minutes: 340 | |
continue-on-error: true | |
if: ${{ steps.get-escu-detections.outputs.escu-test-run == 'true' }} | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
uses: splunk/[email protected] | |
with: | |
splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | |
test-type: ${{ env.TEST_TYPE }} | |
test-args: ${{ steps.get-escu-detections.outputs.escu-detections }} | |
job-name: ${{ steps.create-job-name.outputs.job-name }} | |
labels: ${{ needs.setup.outputs.labels }} | |
workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }} | |
workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }} | |
delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-escu }} | |
addon-url: ${{ needs.setup.outputs.addon-upload-path }} | |
addon-name: ${{ needs.setup.outputs.addon-name }} | |
vendor-version: ${{ matrix.vendor-version.image }} | |
sc4s-version: "No" | |
k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} | |
- name: calculate timeout | |
id: calculate-timeout | |
run: | | |
start_time=${{ steps.capture-start-time.outputs.start_time }} | |
current_time=$(date +%s) | |
remaining_time_minutes=$(( 350-((current_time-start_time)/60) )) | |
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT" | |
- name: Check if pod was deleted | |
id: is-pod-deleted | |
timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }} | |
if: ${{ steps.get-escu-detections.outputs.escu-test-run == 'true' }} | |
shell: bash | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
run: | | |
set -o xtrace | |
if argo watch ${{ steps.run-tests.outputs.workflow-name }} -n workflows | grep "pod deleted"; then | |
echo "retry-workflow=true" >> "$GITHUB_OUTPUT" | |
fi | |
- name: Cancel workflow | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
if: ${{ cancelled() }} | |
run: | | |
cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) | |
cancel_workflow_name=$(echo "$cancel_response" | jq -r '.metadata.name') | |
cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) | |
if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then | |
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" | |
else | |
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" | |
exit 1 | |
fi | |
- name: Retrying workflow | |
id: retry-wf | |
shell: bash | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
if: ${{ steps.get-escu-detections.outputs.escu-test-run == 'true' }} | |
run: | | |
set -o xtrace | |
set +e | |
if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]] | |
then | |
WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name) | |
echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT" | |
argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..." | |
else | |
echo "No retry required" | |
argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | |
argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon" | |
fi | |
- name: check if workflow completed | |
env: | |
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | |
if: ${{ steps.get-escu-detections.outputs.escu-test-run == 'true' }} | |
shell: bash | |
run: | | |
set +e | |
# shellcheck disable=SC2157 | |
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | |
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | |
else | |
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | |
fi | |
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | |
echo "Status of workflow:" "$ARGO_STATUS" | |
while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ] | |
do | |
echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete." | |
argo wait "${WORKFLOW_NAME}" -n workflows || true | |
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | |
done | |
- name: pull artifacts from s3 bucket | |
if: ${{ steps.get-escu-detections.outputs.escu-test-run == 'true' }} | |
run: | | |
echo "pulling artifacts" | |
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | |
tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} | |
- name: pull logs from s3 bucket | |
if: ${{ steps.get-escu-detections.outputs.escu-test-run == 'true' }} | |
run: | | |
# shellcheck disable=SC2157 | |
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | |
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | |
else | |
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | |
fi | |
echo "pulling logs" | |
mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs | |
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive | |
- uses: actions/upload-artifact@v4 | |
if: ${{ steps.get-escu-detections.outputs.escu-test-run == 'true' }} | |
with: | |
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests artifacts | |
path: | | |
${{ needs.setup.outputs.directory-path }}/test-results | |
- uses: actions/upload-artifact@v4 | |
if: ${{ steps.get-escu-detections.outputs.escu-test-run == 'true' }} | |
with: | |
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests logs | |
path: | | |
${{ needs.setup.outputs.directory-path }}/argo-logs | |
- name: Upload results | |
if: ${{ steps.get-escu-detections.outputs.escu-test-run == 'true' }} | |
uses: actions/upload-artifact@v4 | |
with: | |
name: escu-test-result | |
path: | | |
${{ needs.setup.outputs.directory-path }}/test-results/escu-result.xml | |
- name: Test Report | |
id: test_report | |
uses: dorny/test-reporter@v1 | |
if: ${{ steps.get-escu-detections.outputs.escu-test-run == 'true' && !cancelled() && !contains(matrix.splunk.version, 'unreleased-python3_9')}} | |
with: | |
name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} test report | |
path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" | |
reporter: java-junit | |
- name: Test Report Python 3.9 | |
continue-on-error: true | |
id: test_report_python_3_9 | |
uses: dorny/test-reporter@v1 | |
if: ${{ steps.get-escu-detections.outputs.escu-test-run == 'true' && !cancelled() && contains(matrix.splunk.version, 'unreleased-python3_9')}} | |
with: | |
name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} test report | |
path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" | |
reporter: java-junit | |
- name: pull diag from s3 bucket | |
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | |
run: | | |
echo "pulling diag" | |
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | |
- uses: actions/upload-artifact@v4 | |
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | |
with: | |
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests diag | |
path: | | |
${{ needs.setup.outputs.directory-path }}/diag* | |
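# Gate for publishing: collect the results of all required jobs and succeed | |
# only when each of them either passed or was skipped; a preserve_infra | |
# labelling event also blocks publishing. | |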
pre-publish: | |
if: ${{ !cancelled() }} | |
needs: | |
- meta | |
- compliance-copyrights | |
- lint | |
- review-secrets | |
- semgrep | |
- build | |
- virustotal | |
- test-inventory | |
- run-unit-tests-3_7 | |
- appinspect-cli | |
- setup | |
- run-knowledge-tests | |
- run-modinput-tests | |
- run-ui-tests | |
- validate-pr-title | |
runs-on: ubuntu-latest | |
env: | |
NEEDS: ${{ toJson(needs) }} | |
steps: | |
- name: check if tests have passed or skipped | |
id: check | |
shell: bash | |
run: | | |
RUN_PUBLISH=$(echo "$NEEDS" | jq ".[] | select( ( .result != \"skipped\" ) and .result != \"success\" ) | length == 0") | |
if [[ "$RUN_PUBLISH" != *'false'* ]] | |
then | |
echo "run-publish=true" >> "$GITHUB_OUTPUT" | |
else | |
echo "run-publish=false" >> "$GITHUB_OUTPUT" | |
fi | |
- name: exit without publish | |
if: ${{ steps.check.outputs.run-publish == 'false' || ( github.event.action == 'labeled' && github.event.label.name == 'preserve_infra' ) }} | |
run: | | |
echo " Some test job failed or Workflow has triggred on preserve_infra label. " | |
exit 1 | |
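# Publishing runs only for push events (not pull requests or schedules). | |
# semantic-release decides whether a new version is due; every download and | |
# upload step below is gated on its new_release_published output. | |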
publish: | |
if: ${{ !cancelled() && needs.pre-publish.result == 'success' && github.event_name != 'pull_request' && github.event_name != 'schedule' }} | |
needs: | |
- pre-publish | |
- run-escu-tests | |
runs-on: ubuntu-latest | |
permissions: | |
contents: write | |
packages: read | |
pull-requests: read | |
statuses: write | |
steps: | |
- name: Checkout | |
uses: actions/checkout@v4 | |
with: | |
submodules: false | |
persist-credentials: false | |
- name: Semantic Release | |
id: semantic | |
uses: splunk/[email protected] | |
env: | |
GITHUB_TOKEN: ${{ secrets.GH_TOKEN_ADMIN }} | |
with: | |
git_committer_name: ${{ secrets.SA_GH_USER_NAME }} | |
git_committer_email: ${{ secrets.SA_GH_USER_EMAIL }} | |
gpg_private_key: ${{ secrets.SA_GPG_PRIVATE_KEY }} | |
passphrase: ${{ secrets.SA_GPG_PASSPHRASE }} | |
- name: Download package-deployment | |
if: ${{ steps.semantic.outputs.new_release_published == 'true' }} | |
uses: actions/download-artifact@v4 | |
id: download-package-deployment | |
with: | |
name: package-deployment | |
path: download/artifacts/ | |
- name: Download package-splunkbase | |
if: ${{ steps.semantic.outputs.new_release_published == 'true' }} | |
uses: actions/download-artifact@v4 | |
id: download-package-splunkbase | |
with: | |
name: package-splunkbase | |
path: download/artifacts/deployment | |
- name: Download cim-compliance-report | |
id: download-cim-compliance-report | |
if: ${{ steps.semantic.outputs.new_release_published == 'true' }} | |
continue-on-error: true | |
uses: actions/download-artifact@v4 | |
with: | |
name: cim-compliance-report | |
path: download/artifacts/deployment | |
- name: Download cim-field-report | |
id: download-cim-field-report | |
if: ${{ steps.semantic.outputs.new_release_published == 'true' }} | |
continue-on-error: true | |
uses: actions/download-artifact@v4 | |
with: | |
name: cim-field-report | |
path: download/artifacts/deployment | |
- name: Download escu-test-results | |
id: download-escu-xml | |
if: ${{ steps.semantic.outputs.new_release_published == 'true' }} | |
continue-on-error: true | |
uses: actions/download-artifact@v4 | |
with: | |
name: escu-test-result | |
path: download/artifacts/deployment | |
- name: List of assets | |
if: ${{ steps.semantic.outputs.new_release_published == 'true' }} | |
run: | | |
ls -la ${{ steps.download-package-splunkbase.outputs.download-path }} | |
- name: Upload assets to release | |
if: ${{ steps.semantic.outputs.new_release_published == 'true' }} | |
uses: svenstaro/upload-release-action@v2 | |
with: | |
repo_token: ${{ github.token }} | |
file: ${{ steps.download-package-splunkbase.outputs.download-path }}/* | |
overwrite: true | |
file_glob: true | |
tag: v${{ steps.semantic.outputs.new_release_version }} |