diff --git a/.github/scripts/provision.sh b/.github/scripts/provision.sh
deleted file mode 100755
index 4685c5ee..00000000
--- a/.github/scripts/provision.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env bash
-
-set -x
-
-if [[ "$(uname -m)" == "aarch64" ]]; then
-  arch="arm64"
-else
-  arch="amd64"
-fi
-
-dnf install -y podman wget \
-  https://s3.us-east-2.amazonaws.com/amazon-ssm-us-east-2/latest/linux_${arch}/amazon-ssm-agent.rpm
-dnf clean all
-
-wget -P locallm/models https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_S.gguf
-podman run -it -d \
-  -p 8001:8001 \
-  -v ./locallm/models:/locallm/models:ro,Z \
-  -e MODEL_PATH=models/llama-2-7b-chat.Q5_K_S.gguf \
-  -e HOST=0.0.0.0 \
-  -e PORT=8001 \
-  ghcr.io/containers/model_servers:latest
-podman run -it \
-  -p 8501:8501 \
-  -e MODEL_SERVICE_ENDPOINT=http://10.88.0.1:8001/v1 \
-  ghcr.io/containers/chatbot:latest
-
-# this file is sampled when the terraform apply is running
-touch /tmp/user_data_completed
diff --git a/.github/workflows/testing-framework.yaml b/.github/workflows/testing-framework.yaml
index 4be7bd84..74b2f2f1 100644
--- a/.github/workflows/testing-framework.yaml
+++ b/.github/workflows/testing-framework.yaml
@@ -17,7 +17,6 @@ env:
   TF_VAR_aws_volume_size: 100
   TF_VAR_aws_access_key: ${{ secrets.AWS_ACCESS_KEY_ID }}
   TF_VAR_aws_secret_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-  TF_VAR_provision_script: ../main/.github/scripts/provision.sh
   # AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
   # AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
   # AWS_DEFAULT_REGION: "eu-west-2"
@@ -41,7 +40,8 @@ jobs:
             aws_image_type: m7g.medium
             aws_ami_architecture: arm64
     steps:
-      - uses: actions/checkout@v4.1.1
+      - name: Checkout
+        uses: actions/checkout@v4.1.1
         with:
           path: main
 
@@ -59,11 +59,11 @@
           terraform_version: "1.7.5"
           terraform_wrapper: false
 
-      - name: Init Terraform
+      - name: Init
        run: terraform init
         working-directory: terraform-test-environment-module
 
-      - name: Spin Test Environment
+      - name: Bootstrap
         id: up
         run: terraform apply -auto-approve -lock=false
         working-directory: terraform-test-environment-module
@@ -76,8 +76,21 @@
         run: |
           echo "id=$(terraform output id | xargs)" >> $GITHUB_OUTPUT
           echo "url=$(terraform output host | xargs)" >> $GITHUB_OUTPUT
+          echo "pem_filename=$(terraform output pem_filename | xargs)" >> $GITHUB_OUTPUT
         working-directory: terraform-test-environment-module
 
+      - name: Ansible Collections
+        run: ansible-galaxy install -r ./provision/requirements.yml
+        working-directory: ./main/recipes/natural_language_processing/chatbot
+
+      - name: Provision
+        run: |
+          ansible-playbook ./main/recipes/natural_language_processing/chatbot/provision/playbook.yml \
+            -i terraform-test-environment-module/hosts.ini \
+            --private-key=terraform-test-environment-module/${{ steps.terraform-output.outputs.pem_filename }}
+        env:
+          ANSIBLE_HOST_KEY_CHECKING: false
+
       - name: Set up Python
         uses: actions/setup-python@v5.0.0
         with:
@@ -87,9 +100,6 @@
         working-directory: ./main/recipes/natural_language_processing/chatbot
         run: make install
 
-      - name: Wait for the user data script to finish
-        run: sleep 240
-
       - name: Run Integration Tests
         working-directory: ./main/recipes/natural_language_processing/chatbot
         run: make integration-tests
diff --git a/recipes/natural_language_processing/chatbot/provision/playbook.yml b/recipes/natural_language_processing/chatbot/provision/playbook.yml
new file mode 100644
index 00000000..3670e104
--- /dev/null
+++ b/recipes/natural_language_processing/chatbot/provision/playbook.yml
@@ -0,0 +1,60 @@
+---
+- name: Test Environment Provisioning
+  hosts: test_environments
+  remote_user: fedora
+  become: true
+  gather_facts: false
+
+  tasks:
+
+    - name: Wait until the instance is ready
+      ansible.builtin.wait_for_connection:
+        delay: 10
+        timeout: 60
+
+    - name: Gather facts for first time
+      ansible.builtin.setup:
+
+    - name: Required Packages
+      ansible.builtin.package:
+        name: podman
+        state: present
+
+    - name: Models host directory
+      ansible.builtin.file:
+        path: locallm/models
+        state: directory
+
+    - name: Download Model
+      ansible.builtin.get_url:
+        url: https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_S.gguf
+        dest: locallm/models
+
+    - name: Run Model
+      containers.podman.podman_container:
+        name: llamacpp_python
+        image: ghcr.io/containers/llamacpp_python:latest
+        state: started
+        interactive: true
+        tty: true
+        detach: true
+        ports:
+          - 8001:8001
+        volume:
+          - ./locallm/models:/locallm/models:ro,Z
+        env:
+          MODEL_PATH: models/llama-2-7b-chat.Q5_K_S.gguf
+          HOST: 0.0.0.0
+          PORT: 8001
+
+    - name: Run Application
+      containers.podman.podman_container:
+        name: chatbot
+        image: ghcr.io/containers/chatbot:latest
+        state: started
+        interactive: true
+        tty: true
+        ports:
+          - 8501:8501
+        env:
+          MODEL_SERVICE_ENDPOINT: http://10.88.0.1:8001/v1
diff --git a/recipes/natural_language_processing/chatbot/provision/requirements.yml b/recipes/natural_language_processing/chatbot/provision/requirements.yml
new file mode 100644
index 00000000..79a61a5f
--- /dev/null
+++ b/recipes/natural_language_processing/chatbot/provision/requirements.yml
@@ -0,0 +1,4 @@
+---
+collections:
+  - name: containers.podman
+    version: 1.12.0