From af0ea74b8753377f47035876be812acee2f552cd Mon Sep 17 00:00:00 2001
From: Liora Milbaum
Date: Wed, 10 Apr 2024 13:47:18 +0300
Subject: [PATCH] Provision with Ansible

Replace the shell provisioning script with an Ansible playbook. The
testing-framework workflow now installs the required collections and
runs the playbook against the Terraform-provisioned instance instead
of waiting for a fixed-length user-data sleep.

Signed-off-by: Liora Milbaum
---
 .github/scripts/provision.sh             | 29 -------
 .github/workflows/testing-framework.yaml | 30 +++++---
 .../chatbot/provision/playbook.yml        | 76 +++++++++++++++++++
 .../chatbot/provision/requirements.yml    |  4 +
 4 files changed, 100 insertions(+), 39 deletions(-)
 delete mode 100755 .github/scripts/provision.sh
 create mode 100644 recipes/natural_language_processing/chatbot/provision/playbook.yml
 create mode 100644 recipes/natural_language_processing/chatbot/provision/requirements.yml

diff --git a/.github/scripts/provision.sh b/.github/scripts/provision.sh
deleted file mode 100755
index 4685c5ee3..000000000
--- a/.github/scripts/provision.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env bash
-
-set -x
-
-if [[ "$(uname -m)" == "aarch64" ]]; then
-  arch="arm64"
-else
-  arch="amd64"
-fi
-
-dnf install -y podman wget \
-  https://s3.us-east-2.amazonaws.com/amazon-ssm-us-east-2/latest/linux_${arch}/amazon-ssm-agent.rpm
-dnf clean all
-
-wget -P locallm/models https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_S.gguf
-podman run -it -d \
-  -p 8001:8001 \
-  -v ./locallm/models:/locallm/models:ro,Z \
-  -e MODEL_PATH=models/llama-2-7b-chat.Q5_K_S.gguf \
-  -e HOST=0.0.0.0 \
-  -e PORT=8001 \
-  ghcr.io/containers/model_servers:latest
-podman run -it \
-  -p 8501:8501 \
-  -e MODEL_SERVICE_ENDPOINT=http://10.88.0.1:8001/v1 \
-  ghcr.io/containers/chatbot:latest
-
-# this file is sampled when the terraform apply is running
-touch /tmp/user_data_completed
diff --git a/.github/workflows/testing-framework.yaml b/.github/workflows/testing-framework.yaml
index 4be7bd842..d3a74b472 100644
--- a/.github/workflows/testing-framework.yaml
+++ b/.github/workflows/testing-framework.yaml
@@ -6,9 +6,9 @@

   workflow_dispatch:

-  # pull_request: ## temporary for debugging development purposes
-  #   branches:
-  #     - main
+  pull_request: ## temporary for debugging development purposes
+    branches:
+      - main

 env:
   TF_VAR_aws_region: "eu-west-2"
@@ -17,7 +17,6 @@
   TF_VAR_aws_volume_size: 100
   TF_VAR_aws_access_key: ${{ secrets.AWS_ACCESS_KEY_ID }}
   TF_VAR_aws_secret_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-  TF_VAR_provision_script: ../main/.github/scripts/provision.sh
   # AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
   # AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
   # AWS_DEFAULT_REGION: "eu-west-2"
@@ -41,7 +40,8 @@
             aws_image_type: m7g.medium
             aws_ami_architecture: arm64
     steps:
-      - uses: actions/checkout@v4.1.1
+      - name: Checkout
+        uses: actions/checkout@v4.1.1
         with:
           path: main

@@ -59,11 +59,11 @@
           terraform_version: "1.7.5"
           terraform_wrapper: false

-      - name: Init Terraform
+      - name: Init
         run: terraform init
         working-directory: terraform-test-environment-module

-      - name: Spin Test Environment
+      - name: Bootstrap
         id: up
         run: terraform apply -auto-approve -lock=false
         working-directory: terraform-test-environment-module
@@ -76,8 +76,21 @@
         run: |
           echo "id=$(terraform output id | xargs)" >> $GITHUB_OUTPUT
           echo "url=$(terraform output host | xargs)" >> $GITHUB_OUTPUT
+          echo "pem_filename=$(terraform output pem_filename | xargs)" >> $GITHUB_OUTPUT
         working-directory: terraform-test-environment-module

+      - name: Ansible Collections
+        run: ansible-galaxy install -r ./provision/requirements.yml
+        working-directory: ./main/recipes/natural_language_processing/chatbot
+
+      - name: Provision
+        run: |
+          ansible-playbook ./main/recipes/natural_language_processing/chatbot/provision/playbook.yml \
+            -i terraform-test-environment-module/hosts.ini \
+            --private-key=terraform-test-environment-module/${{ steps.terraform-output.outputs.pem_filename }}
+        env:
+          ANSIBLE_HOST_KEY_CHECKING: false
+
       - name: Set up Python
         uses: actions/setup-python@v5.0.0
         with:
@@ -87,9 +100,6 @@
       - name: Dependencies
         working-directory: ./main/recipes/natural_language_processing/chatbot
         run: make install

-      - name: Wait for the user data script to finish
-        run: sleep 240
-
       - name: Run Integration Tests
         working-directory: ./main/recipes/natural_language_processing/chatbot
         run: make integration-tests
diff --git a/recipes/natural_language_processing/chatbot/provision/playbook.yml b/recipes/natural_language_processing/chatbot/provision/playbook.yml
new file mode 100644
index 000000000..2a61bb06a
--- /dev/null
+++ b/recipes/natural_language_processing/chatbot/provision/playbook.yml
@@ -0,0 +1,76 @@
+---
+- name: Test Environment Provisioning
+  hosts: test_environments
+  remote_user: fedora
+  become: true
+  gather_facts: false
+  vars:
+    arch: "amd64"
+
+  tasks:
+
+    - name: Wait until the instance is ready
+      ansible.builtin.wait_for_connection:
+        delay: 10
+        timeout: 60
+
+    - name: Gather facts for first time
+      ansible.builtin.setup:
+
+    - name: Set variable arch
+      ansible.builtin.set_fact:
+        arch: "arm64"
+      when: ansible_architecture == "aarch64"
+
+    # - name: Download GPG key file
+    #   ansible.builtin.get_url:
+    #     url: "https://s3.eu-west-2.amazonaws.com/amazon-ssm-eu-west-2/latest/linux_{{ arch }}/amazon-ssm-agent.rpm.sig"
+    #     dest: "/tmp/gpg_key.pub"
+
+    # - name: Import GPG key
+    #   ansible.builtin.rpm_key:
+    #     state: present
+    #     key: "/tmp/gpg_key.pub"
+
+    # - name: Required packages
+    #   ansible.builtin.package:
+    #     name: "https://s3.eu-west-2.amazonaws.com/amazon-ssm-us-east-2/latest/linux_{{ arch }}/amazon-ssm-agent.rpm"
+    #     state: present
+
+    - name: Models host directory
+      ansible.builtin.file:
+        path: locallm/models
+        state: directory
+
+    - name: Download Model
+      ansible.builtin.get_url:
+        url: https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_S.gguf
+        dest: locallm/models
+
+    - name: Run Model
+      containers.podman.podman_container:
+        image: ghcr.io/containers/llamacpp_python:latest
+        state: started
+        interactive: true
+        tty: true
+        detach: true
+        ports:
+          - 8001:8001
+        volume:
+          - ./locallm/models:/locallm/models:ro,Z
+        env:
+          MODEL_PATH: models/llama-2-7b-chat.Q5_K_S.gguf
+          HOST: 0.0.0.0
+          PORT: 8001
+
+    - name: Run Application
+      containers.podman.podman_container:
+        image: ghcr.io/containers/chatbot:latest
+        state: started
+        interactive: true
+        tty: true
+        ports:
+          - 8501:8501
+        env:
+          MODEL_SERVICE_ENDPOINT: http://10.88.0.1:8001/v1
+
diff --git a/recipes/natural_language_processing/chatbot/provision/requirements.yml b/recipes/natural_language_processing/chatbot/provision/requirements.yml
new file mode 100644
index 000000000..79a61a5ff
--- /dev/null
+++ b/recipes/natural_language_processing/chatbot/provision/requirements.yml
@@ -0,0 +1,4 @@
+---
+collections:
+  - name: containers.podman
+    version: 1.12.0