feat: add btool check (#316)
Added a `run-btool-check` workflow job that validates the configuration
of the TA using `btool check`.
Ref: https://splunk.atlassian.net/browse/ADDON-70489

Test workflow runs:
- https://github.com/splunk/splunk-add-on-for-google-workspace/actions/runs/10631152841
- https://github.com/splunk/splunk-add-on-for-servicenow/actions/runs/10919108085
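
For context, `btool check` is Splunk's built-in configuration validator. A minimal sketch of an equivalent local check on a packaged build (the `Splunk_TA_example` name and paths are placeholders, not part of this change):

```
# Hypothetical pre-release smoke check; adjust SPLUNK_HOME and the TA name.
SPLUNK_HOME=${SPLUNK_HOME:-/opt/splunk}
# .spl packages are gzipped tarballs, so they extract straight into etc/apps.
tar -xzf Splunk_TA_example.spl -C "$SPLUNK_HOME/etc/apps/"
# btool validates every .conf it can see; grep keeps only this TA's findings.
"$SPLUNK_HOME/bin/splunk" btool check | grep "Splunk_TA_example"
```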

---------

Co-authored-by: mkolasinski-splunk <[email protected]>
Co-authored-by: Artem Rys <[email protected]>
Co-authored-by: kdoroszko-splunk <[email protected]>
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: kgwizdz-splunk <[email protected]>
Co-authored-by: Adam Wownysz <[email protected]>
Co-authored-by: Marcin Bruzda <[email protected]>
Co-authored-by: harshilgajera-crest <[email protected]>
9 people authored Oct 29, 2024
1 parent 55904ee commit 908de12
Showing 2 changed files with 223 additions and 1 deletion.
199 changes: 198 additions & 1 deletion .github/workflows/reusable-build-test-release.yml
@@ -33,7 +33,7 @@ on:
        required: false
        description: "branch for k8s manifests to run the tests on"
        type: string
-       default: "v3.0.5"
+       default: "v3.1.0"
      scripted-inputs-os-list:
        required: false
        description: "list of OS used for scripted input tests"
@@ -989,6 +989,203 @@ jobs:
          swagger_name=swagger_$(basename "$BUILD_NAME" .spl)
          aws s3 sync "${{ steps.download-openapi.outputs.download-path }}/tmp/restapi_client/" "s3://${{ needs.setup-workflow.outputs.s3_bucket_k8s }}/ta-apps/$swagger_name/" --exclude "*" --include "README.md" --include "*swagger_client*" --only-show-errors
  run-btool-check:
    if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.knowledge == 'true' && needs.setup-workflow.outputs.execute-knowledge-labeled == 'true' }}
    needs:
      - build
      - test-inventory
      - setup
      - meta
      - setup-workflow
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        splunk: ${{ fromJson(needs.meta.outputs.matrix_latestSplunk) }}
        sc4s: ${{ fromJson(needs.meta.outputs.matrix_supportedSC4S) }}
    container:
      image: ghcr.io/splunk/workflow-engine-base:4.1.0
    env:
      ARGO_SERVER: ${{ needs.setup.outputs.argo-server }}
      ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }}
      ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }}
      ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }}
      ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }}
      SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }}
      TEST_TYPE: "btool"
      TEST_ARGS: ""
    permissions:
      actions: read
      deployments: read
      contents: read
      packages: read
      statuses: read
      checks: write
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: capture start time
        id: capture-start-time
        run: |
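          # record the epoch start time; the calculate-timeout step below uses it to size the remaining watch window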
          echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT"
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
      - name: Read secrets from AWS Secrets Manager into environment variables
        id: get-argo-token
        run: |
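          # fetch the Argo API token from Secrets Manager; it is re-read later in case the credential rotates mid-job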
          ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString')
          echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT"
      - name: create job name
        id: create-job-name
        shell: bash
        run: |
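          # build a unique, DNS-safe Argo job name: random 4-letter suffix, TEST-TYPE placeholder filled in, dots/underscores and uppercase normalized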
          RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4)
          JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING}
          JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}}
          JOB_NAME=${JOB_NAME//[_.]/-}
          JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]')
          echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT"
      - name: run-btool-check
        id: run-btool-check
        timeout-minutes: 10
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        uses: splunk/[email protected]
        with:
          splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }}
          test-type: ${{ env.TEST_TYPE }}
          test-args: ""
          job-name: ${{ steps.create-job-name.outputs.job-name }}
          labels: ${{ needs.setup.outputs.labels }}
          workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }}
          workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }}
          addon-url: ${{ needs.setup.outputs.addon-upload-path }}
          addon-name: ${{ needs.setup.outputs.addon-name }}
          sc4s-version: ${{ matrix.sc4s.version }}
          sc4s-docker-registry: ${{ matrix.sc4s.docker_registry }}
          k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }}
      - name: Read secrets from AWS Secrets Manager again into environment variables in case of credential rotation
        id: update-argo-token
        if: ${{ !cancelled() }}
        run: |
          ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString')
          echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT"
      - name: calculate timeout
        id: calculate-timeout
        run: |
          start_time=${{ steps.capture-start-time.outputs.start_time }}
          current_time=$(date +%s)
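          # minutes left of the 10-minute budget started at capture-start-time; bounds the pod-deleted watch below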
          remaining_time_minutes=$(( 10-((current_time-start_time)/60) ))
          echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
      - name: Check if pod was deleted
        id: is-pod-deleted
        timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }}
        if: ${{ !cancelled() }}
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
        run: |
          set -o xtrace
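          # "pod deleted" in the watch output means the workflow pod was evicted; flag the run so the retry step resubmits it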
          if argo watch ${{ steps.run-btool-check.outputs.workflow-name }} -n workflows | grep "pod deleted"; then
            echo "retry-workflow=true" >> "$GITHUB_OUTPUT"
          fi
      - name: Cancel workflow
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        if: ${{ cancelled() }}
        run: |
          cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-btool-check.outputs.workflow-name }})
          cancel_workflow_name=$( echo "$cancel_response" | jq -r '.metadata.name' )
          cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows)
          if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-btool-check.outputs.workflow-name }} stopped"; then
            echo "Workflow ${{ steps.run-btool-check.outputs.workflow-name }} stopped"
          else
            echo "Workflow ${{ steps.run-btool-check.outputs.workflow-name }} didn't stop"
            exit 1
          fi
      - name: Retrying workflow
        id: retry-wf
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
        if: ${{ !cancelled() }}
        run: |
          set -o xtrace
          set +e
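          # resubmit only when the earlier watch flagged a deleted pod; otherwise just wait on the original workflow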
          if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]]
          then
            WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-btool-check.outputs.workflow-name }}" | jq -r .metadata.name)
            echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT"
            argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..."
          else
            echo "No retry required"
            argo wait "${{ steps.run-btool-check.outputs.workflow-name }}" -n workflows
            argo watch "${{ steps.run-btool-check.outputs.workflow-name }}" -n workflows | grep "btool-check"
          fi
      - name: check workflow status
        id: check-workflow-status
        env:
          ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
        shell: bash
        if: ${{ !cancelled() }}
        run: |
          set +e
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-btool-check.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
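          # poll the workflow phase until it leaves Running/Pending, then gate this job on Succeeded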
          ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ]
          do
            echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete."
            argo wait "${WORKFLOW_NAME}" -n workflows || true
            ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          done
          echo "Status of workflow:" "$ARGO_STATUS"
          echo "workflow-status=$ARGO_STATUS" >> "$GITHUB_OUTPUT"
          if [ "$ARGO_STATUS" == "Succeeded" ]; then
            exit 0
          else
            exit 1
          fi
      - name: pull artifacts from s3 bucket
        if: ${{ !cancelled() && steps.check-workflow-status.outputs.workflow-status != 'Succeeded' }}
        run: |
          echo "pulling artifacts"
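          # failure path only: download and unpack the artifact tarball the Argo workflow pushed to S3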
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/
          tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }}
      - name: pull logs from s3 bucket
        if: ${{ !cancelled() && steps.check-workflow-status.outputs.workflow-status != 'Succeeded' }}
        run: |
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-btool-check.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
          echo "pulling logs"
          mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/workflows/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive
      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() && steps.check-workflow-status.outputs.workflow-status != 'Succeeded' }}
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests artifacts
          path: |
            ${{ needs.setup.outputs.directory-path }}/test-results
      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() && steps.check-workflow-status.outputs.workflow-status != 'Succeeded' }}
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests logs
          path: |
            ${{ needs.setup.outputs.directory-path }}/argo-logs
  run-knowledge-tests:
    if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.knowledge == 'true' && needs.setup-workflow.outputs.execute-knowledge-labeled == 'true' }}
    needs:
25 changes: 25 additions & 0 deletions README.md
@@ -443,6 +443,31 @@ test_cim_output.txt
test_check_unicode_output.txt
```

# run-btool-check

**Description:**

- This stage validates the configuration files of the TA using Splunk's `btool check` utility.

**Action used:**
- No action used

**Pass/fail behaviour:**

- The stage is expected to fail only if `btool check` reports errors.

**Troubleshooting steps for failures if any:**

- `btool-output.txt` can be used to identify the errors reported by `btool check`.

- Users can validate the configuration locally by executing `$SPLUNK_HOME/bin/splunk btool check | grep "Splunk_TA_example"` against a Splunk installation with the TA installed, as shown in the sketch below.
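
A minimal local sketch (assumes a Splunk installation at `$SPLUNK_HOME`; `Splunk_TA_example` and the build path are placeholders for your add-on):

```
# Install the built TA into a local Splunk, then validate it with btool.
export SPLUNK_HOME=/opt/splunk                            # placeholder path
cp -r output/Splunk_TA_example "$SPLUNK_HOME/etc/apps/"   # hypothetical build output
# btool checks every .conf file Splunk can load; grep keeps only this TA's lines.
# No output means the configuration is clean; each printed line is a finding to fix.
"$SPLUNK_HOME/bin/splunk" btool check | grep "Splunk_TA_example"
```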

**Artifacts:**

```
btool-output.txt
```

# run-knowledge-tests

**Description:**
