name: testing
# Controls when the workflow will run
on:
  # Triggers the workflow when a label is added to a pull request
  pull_request_target:
    types: [ labeled ]
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
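# Job flow: start-runner provisions a self-hosted EC2 runner, do-the-job runs the
# test suites on that runner, stop-runner terminates the instance, and
# remove-safe-to-test clears the approval label afterwards.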
jobs:
  start-runner:
    if: ${{ (contains(github.event.pull_request.labels.*.name, 'safe to test') && !contains(github.event.pull_request.labels.*.name, 'lgtm')) || (contains(github.event_name, 'workflow_dispatch')) }}
    runs-on: ubuntu-latest
    outputs:
      label: ${{ steps.start-ec2-runner.outputs.label }}
      ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }}
    permissions:
      id-token: write
      contents: read
    steps:
      - name: Setup AWS Credentials
        uses: aws-actions/configure-aws-credentials@master
        with:
          role-to-assume: arn:aws:iam::783680406432:role/GithubActionsTestingRole-test-us-east-1
          aws-region: us-east-1
      - name: Configure EC2 Runner Parameters
        run: |
          AWS_REGION=us-east-1
          VPC_ID=$(aws ec2 describe-vpcs --filters 'Name=tag-key,Values=GithubActionsTesting' --query 'Vpcs[0].VpcId' --output text)
          SUBNET_ID=$(aws ec2 describe-subnets --filters "Name=vpc-id,Values=$VPC_ID" "Name=tag-key,Values=GithubActionsTesting" --query 'Subnets[0].SubnetId' --output text)
          SG_ID=$(aws ec2 describe-security-groups --filters "Name=vpc-id,Values=$VPC_ID" "Name=tag-key,Values=GithubActionsTesting" --query 'SecurityGroups[0].GroupId' --output text)
          AMI=$(aws ssm get-parameter --name /aws/service/ami-amazon-linux-latest/al2023-ami-kernel-default-x86_64 --query 'Parameter.Value' --output text)
          echo AWS_REGION=$AWS_REGION >> $GITHUB_ENV
          echo SUBNET_ID=$SUBNET_ID >> $GITHUB_ENV
          echo SG_ID=$SG_ID >> $GITHUB_ENV
          echo AMI=$AMI >> $GITHUB_ENV
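      # The step below launches the self-hosted runner using the VPC, subnet,
      # security group, and AMI discovered above (all looked up via the
      # GithubActionsTesting tag).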
      - name: Start EC2 runner
        id: start-ec2-runner
        uses: divyansh-gupta/ec2-github-runner@0444f5f46462bcf8d98932bc807d2f51c4945b58
        with:
          mode: start
          github-token: GithubToken-test-us-east-1
          ec2-image-id: ${{ env.AMI }}
          ec2-instance-type: t3.medium
          subnet-id: ${{ env.SUBNET_ID }}
          security-group-id: ${{ env.SG_ID }}
          iam-role-name: K8sPluginInstanceProfile-test-us-east-1
          ec2-launch-template: GithubRunnerLaunchTemplate-test-us-east-1
          aws-resource-tags: >
            [
              {"Key": "Name", "Value": "ec2-github-runner"},
              {"Key": "GitHubRepository", "Value": "${{ github.repository }}"}
            ]
          pre-runner-script: |
            sudo yum install libicu -y
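  # Main test job: runs on the EC2 runner started above and executes the
  # end-to-end, Helm, and blog test suites via the repository Makefile.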
  do-the-job:
    if: ${{ (contains(github.event.pull_request.labels.*.name, 'safe to test') && !contains(github.event.pull_request.labels.*.name, 'lgtm')) || (contains(github.event_name, 'workflow_dispatch')) }}
    name: Do the job on the runner
    needs: start-runner # required to start the main job when the runner is ready
    runs-on: ${{ needs.start-runner.outputs.label }} # run the job on the newly created runner
    steps:
      - name: Install Git
        run: sudo yum install git -y
      - name: Check out code into the Go module directory
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ github.event.pull_request.head.ref }}
          repository: ${{ github.event.pull_request.head.repo.full_name }}
      - name: Install Development Tools
        run: |
          sudo yum groupinstall "Development Tools" -y
      - name: Install Go
        run: |
          mkdir /home/ec2-user/.cache
          mkdir /home/ec2-user/.cache/go-build
          mkdir /home/ec2-user/.cache/go-mod
          mkdir /home/ec2-user/go
          mkdir /home/ec2-user/go/bin
          GOVERSION=go1.22.3
          wget https://go.dev/dl/$GOVERSION.linux-amd64.tar.gz
          sudo rm -rf /usr/local/go && sudo tar -C /usr/local -xzf $GOVERSION.linux-amd64.tar.gz
          PATH="$PATH:/usr/local/go/bin"
          GOPATH="/home/ec2-user/go"
          GOBIN="/home/ec2-user/go/bin"
          GOCACHE="/home/ec2-user/.cache/go-build"
          GOMODCACHE="/home/ec2-user/.cache/go-mod"
          echo GOPATH=$GOPATH >> $GITHUB_ENV
          echo GOBIN=$GOBIN >> $GITHUB_ENV
          echo GOCACHE=$GOCACHE >> $GITHUB_ENV
          echo GOMODCACHE=$GOMODCACHE >> $GITHUB_ENV
          echo PATH=$PATH >> $GITHUB_ENV
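      # Cache the Go build and module caches across runs, keyed on go.sum, so
      # repeated test runs don't rebuild or re-download every dependency.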
      - name: Setup Cache variables
        id: go-cache-paths
        shell: bash
        run: |
          echo "go-build=${{ env.GOCACHE }}" >> $GITHUB_OUTPUT
          echo "go-mod=${{ env.GOMODCACHE }}" >> $GITHUB_OUTPUT
      - name: Go-Build Cache # Cache go build cache, used to speed up go test
        uses: actions/cache@v4
        with:
          path: ${{ steps.go-cache-paths.outputs.go-build }}
          key: ${{ runner.os }}-go-build-${{ hashFiles('**/go.sum') }}
      - name: Go-Mod Cache # Cache go mod cache, used to speed up builds
        uses: actions/cache@v4
        with:
          path: ${{ steps.go-cache-paths.outputs.go-mod }}
          key: ${{ runner.os }}-go-mod-${{ hashFiles('**/go.sum') }}
      - name: Install Docker
        run: |
          sudo yum install docker -y
          sudo service docker start
      - name: Install AWS CLI v2
        run: |
          curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
          unzip awscliv2.zip
          sudo ./aws/install
      - name: Install kubectl
        run: |
          curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl"
          sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl
      - name: Install Jq
        run: |
          sudo yum install jq -y
      - name: Install Helm
        run: |
          curl -sSL https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 | bash
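      # PLUGIN_USER_NAME_OVERRIDE and PLUGIN_CROSS_ACCOUNT_ROLE appear to point
      # the e2e suite at pre-provisioned IAM test resources; HOME is redirected
      # to the workspace, presumably so tools that write under $HOME stay inside
      # the job directory.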
      - name: Setup test environment
        run: |
          export PLUGIN_USER_NAME_OVERRIDE=K8sPluginUser-test-us-east-1
          export PLUGIN_CROSS_ACCOUNT_ROLE=arn:aws:iam::874411242420:role/CrossAccountTestingRole-crossaccount-us-east-1
          export HOME=$GITHUB_WORKSPACE
          echo PLUGIN_USER_NAME_OVERRIDE=$PLUGIN_USER_NAME_OVERRIDE >> $GITHUB_ENV
          echo PLUGIN_CROSS_ACCOUNT_ROLE=$PLUGIN_CROSS_ACCOUNT_ROLE >> $GITHUB_ENV
          echo HOME=$HOME >> $GITHUB_ENV
      - name: Run test cases with EC2 Instance Profiles and K8s Secret
        run: |
          make cluster
          make e2etest
      - name: Copy Kind logs to S3
        if: ${{ always() }}
        run: |
          mkdir logs
          export E2E_ARTIFACTS_DIRECTORY=logs
          make kind-export-logs
          aws s3 cp --recursive logs s3://aws-privateca-issuer-k8s-logs-test-us-east-1/${{ needs.start-runner.outputs.ec2-instance-id }}/
      - name: Terminate Kind cluster and set up for next test
        if: ${{ always() }}
        run: |
          make kind-cluster-delete
          export OIDC_IAM_ROLE=arn:aws:iam::783680406432:role/IrsaTestingRole-test-us-east-1
          export OIDC_S3_BUCKET_NAME=aws-privateca-issuer-irsa-test-us-east-1
          echo OIDC_IAM_ROLE=$OIDC_IAM_ROLE >> $GITHUB_ENV
          echo OIDC_S3_BUCKET_NAME=$OIDC_S3_BUCKET_NAME >> $GITHUB_ENV
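      # Second pass: repeat the e2e suite using IRSA (IAM Roles for Service
      # Accounts) credentials instead of the EC2 instance profile.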
      - name: Run test cases with IRSA and K8s Secret
        if: ${{ always() }}
        run: |
          make cluster
          make install-eks-webhook
          make e2etest
      - name: Copy Kind logs to S3
        if: ${{ always() }}
        run: |
          mkdir logs-irsa
          export E2E_ARTIFACTS_DIRECTORY=logs-irsa
          make kind-export-logs
          aws s3 cp --recursive logs-irsa s3://aws-privateca-issuer-k8s-logs-test-us-east-1/${{ needs.start-runner.outputs.ec2-instance-id }}-irsa/
      - name: Terminate Kind cluster
        if: ${{ always() }}
        run: |
          make kind-cluster-delete
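      # The Helm test steps below are skipped on PRs labeled 'chart update'.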
      - name: Run helm test
        if: ${{ !contains(github.event.pull_request.labels.*.name, 'chart update') }}
        run: |
          make helm-test
      - name: Copy Kind logs to S3
        if: ${{ !contains(github.event.pull_request.labels.*.name, 'chart update') }}
        run: |
          mkdir logs-helm-test
          export E2E_ARTIFACTS_DIRECTORY=logs-helm-test
          make kind-export-logs
          aws s3 cp --recursive logs-helm-test s3://aws-privateca-issuer-k8s-logs-test-us-east-1/${{ needs.start-runner.outputs.ec2-instance-id }}-logs-helm-test/
      - name: Terminate Kind cluster
        if: ${{ !contains(github.event.pull_request.labels.*.name, 'chart update') }}
        run: |
          make kind-cluster-delete
      - name: Run Blog Test
        run: |
          make cluster
          make install-eks-webhook
          make blog-test
      - name: Copy Kind logs to S3
        if: ${{ always() }}
        run: |
          mkdir logs-blog
          export E2E_ARTIFACTS_DIRECTORY=logs-blog
          make kind-export-logs
          aws s3 cp --recursive logs-blog s3://aws-privateca-issuer-k8s-logs-test-us-east-1/${{ needs.start-runner.outputs.ec2-instance-id }}-blog/
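  # Always tear down the EC2 runner, even when earlier jobs fail, so the
  # instance is not left running.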
  stop-runner:
    name: Stop self-hosted EC2 runner
    needs:
      - start-runner # required to get output from the start-runner job
      - do-the-job # required to wait until the main job is done
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      contents: read
    if: ${{ (contains(github.event.pull_request.labels.*.name, 'safe to test') && !contains(github.event.pull_request.labels.*.name, 'lgtm') && always()) || (contains(github.event_name, 'workflow_dispatch')) }} # required to stop the runner even if an error happened in the previous jobs
    steps:
      - name: Setup AWS Credentials
        uses: aws-actions/configure-aws-credentials@master
        with:
          role-to-assume: arn:aws:iam::783680406432:role/GithubActionsTestingRole-test-us-east-1
          aws-region: us-east-1
      - name: Setup AWS Region
        run: |
          AWS_REGION=us-east-1
          echo AWS_REGION=$AWS_REGION >> $GITHUB_ENV
      - name: Stop EC2 runner
        uses: divyansh-gupta/ec2-github-runner@0444f5f46462bcf8d98932bc807d2f51c4945b58
        with:
          mode: stop
          github-token: GithubToken-test-us-east-1
          label: ${{ needs.start-runner.outputs.label }}
          ec2-instance-id: ${{ needs.start-runner.outputs.ec2-instance-id }}
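  # Drop the 'safe to test' label once the run finishes, presumably so that new
  # pushes to the PR must be re-approved before tests run again.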
  remove-safe-to-test:
    name: Remove Safe to Test Label
    needs:
      - stop-runner
    runs-on: ubuntu-latest
    if: ${{ contains(github.event.pull_request.labels.*.name, 'safe to test') && !contains(github.event.pull_request.labels.*.name, 'lgtm') && always() }} # required to remove the label even if an error happened in the previous jobs
    steps:
      - name: Remove Label
        uses: divyansh-gupta/action-remove-labels@2ce5d41b4b6aa8503e285553f75ed56e0a40bae0
        with:
          labels:
            'safe to test'
          # We set this to false so an edge-case failure here doesn't fail the
          # workflow; worst case the label stays and we remove/re-add it.
          fail_on_error: 'false'