
Added e2e tests GH action, with docker compose build - debugging action #117

Workflow file for this run

name: End-to-End Tests

#on: [push, pull_request]
on: [push]
#on:
#  pull_request_target:
#    types: [labeled]
jobs:
  test:
    runs-on: ubuntu-latest
    environment: "GitHub Actions 1"
    env:
      OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
      ASSISTANTS_API_KEY: ${{ secrets.OPENAI_API_KEY }}
      ASSISTANTS_API_TYPE: ${{ secrets.ASSISTANTS_API_TYPE }}
      ASSISTANTS_ID: ${{ secrets.ASSISTANTS_ID }}
      ASSISTANTS_BASE_URL: ${{ secrets.ASSISTANTS_BASE_URL }}
      ASSISTANTS_MODEL: ${{ secrets.ASSISTANTS_MODEL }}
      ASSISTANTS_BOT_NAME: ${{ secrets.ASSISTANTS_BOT_NAME }}
      POSTGRES_DATA_HOST: ${{ secrets.POSTGRES_DATA_HOST }}
      POSTGRES_DATA_PORT: ${{ secrets.POSTGRES_DATA_PORT }}
      POSTGRES_DATA_DB: ${{ secrets.POSTGRES_DATA_DB }}
      POSTGRES_DATA_USER: ${{ secrets.POSTGRES_DATA_USER }}
      POSTGRES_DATA_PASSWORD: ${{ secrets.POSTGRES_DATA_PASSWORD }}
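      # The connection strings below follow the standard libpq URI format; an
      # illustrative example (hypothetical values, not real credentials):
      #   postgresql://app_user:s3cret@db:5432/datadb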
      DATA_DB_CONN_STRING: "postgresql://${{ secrets.POSTGRES_DATA_USER }}:${{ secrets.POSTGRES_DATA_PASSWORD }}@${{ secrets.POSTGRES_DATA_HOST }}:${{ secrets.POSTGRES_DATA_PORT }}/${{ secrets.POSTGRES_DATA_DB }}"
      POSTGRES_RECIPE_HOST: ${{ secrets.POSTGRES_RECIPE_HOST }}
      POSTGRES_RECIPE_PORT: ${{ secrets.POSTGRES_RECIPE_PORT }}
      POSTGRES_RECIPE_DB: ${{ secrets.POSTGRES_RECIPE_DB }}
      POSTGRES_RECIPE_USER: ${{ secrets.POSTGRES_RECIPE_USER }}
      POSTGRES_RECIPE_PASSWORD: ${{ secrets.POSTGRES_RECIPE_PASSWORD }}
      RECIPE_DB_CONN_STRING: "postgresql://${{ secrets.POSTGRES_RECIPE_USER }}:${{ secrets.POSTGRES_RECIPE_PASSWORD }}@${{ secrets.POSTGRES_RECIPE_HOST }}:${{ secrets.POSTGRES_RECIPE_PORT }}/${{ secrets.POSTGRES_RECIPE_DB }}"
      RECIPES_OPENAI_API_TYPE: ${{ secrets.RECIPES_OPENAI_API_TYPE }}
      RECIPES_OPENAI_API_KEY: ${{ secrets.RECIPES_OPENAI_API_KEY }}
      RECIPES_MODEL: ${{ secrets.RECIPES_MODEL }}
      RECIPES_OPENAI_TEXT_COMPLETION_DEPLOYMENT_NAME: ${{ secrets.RECIPES_OPENAI_TEXT_COMPLETION_DEPLOYMENT_NAME }}
      RECIPES_MEMORY_SIMILARITY_CUTOFF: ${{ secrets.RECIPES_MEMORY_SIMILARITY_CUTOFF }}
      RECIPES_RECIPE_SIMILARITY_CUTOFF: ${{ secrets.RECIPES_RECIPE_SIMILARITY_CUTOFF }}
      RECIPES_HELPER_FUNCTION_SIMILARITY_CUTOFF: ${{ secrets.RECIPES_HELPER_FUNCTION_SIMILARITY_CUTOFF }}
      RECIPES_MODEL_TEMP: ${{ secrets.RECIPES_MODEL_TEMP }}
      RECIPES_MODEL_MAX_TOKENS: ${{ secrets.RECIPES_MODEL_MAX_TOKENS }}
      IMAGE_HOST: ${{ secrets.IMAGE_HOST }}
      RECIPE_SERVER_API: ${{ secrets.RECIPE_SERVER_API }}
      CHAINLIT_AUTH_SECRET: ${{ secrets.CHAINLIT_AUTH_SECRET }}
      USER_LOGIN: ${{ secrets.USER_LOGIN }}
      USER_PASSWORD: ${{ secrets.USER_PASSWORD }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      #- name: Checkout integration tests data
      #  uses: actions/checkout@master
      #  with:
      #    repository: datakind/recipes-ai-test-data
      #    ssh-key: ${{ secrets.GITHUB_SSH_PRIVATE_KEY }}
      #    path: recipes-ai-test-data
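      # This action exposes the Actions runtime environment (ACTIONS_RUNTIME_TOKEN,
      # ACTIONS_CACHE_URL) to the job, which docker buildx needs for its 'gha' cache backend.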
      - name: Expose GitHub Runtime
        uses: crazy-max/ghaction-github-runtime@v2
      - name: Spin up DB and recipes server
        run: |
          env > .env
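          # Note: this dumps the full job environment (including the secrets above)
          # into .env, which docker-compose reads when starting the services.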
          # TODO This should be enhanced to use a buildx bake to leverage layer caching
          # for faster builds, or to push images to a registry and simply pull them for the run
          # TODO The docker-compose files should be refactored to use scopes instead of a
          # different version for each environment
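          # A possible bake invocation for the caching TODO above (untested sketch,
          # assumes the compose file is compatible with buildx bake):
          #   docker buildx bake -f docker-compose-github.yml \
          #     --set "*.cache-from=type=gha" --set "*.cache-to=type=gha,mode=max"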
          docker-compose -f ./docker-compose-github.yml pull
          docker-compose -f ./docker-compose-github.yml up -d --build
          echo "logs datadb ..."
          docker-compose -f ./docker-compose-github.yml logs datadb
      # TODO The promptflow docker build wasn't working in GH actions, so it is deployed to the host for now
      - name: Set up promptflow and run tests
        uses: actions/setup-python@v4
        with:
          python-version: "3.11.4"
      - run: |
          pip3 install promptflow==1.12.0
          pip3 install promptflow-tools==1.4.0
          pip3 install chainlit==1.1.305
          pip3 install keyrings.alt
          pip3 list
      - run: |
          cd flows/chainlit-ui-evaluation/
          pf connection create --file ./openai.yaml --set api_key=$OPENAI_API_KEY --name open_ai_connection
          pf connection create --file ./azure_openai.yaml --set api_key=$OPENAI_API_KEY --set api_base=$OPENAI_API_ENDPOINT --name azure_openai
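          # The 'pf connection create' calls register local promptflow connections
          # that the flow then references by name at run time.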
          pf run create --flow . --data ./data.jsonl --stream --column-mapping query='${data.query}' context='${data.context}' chat_history='${data.chat_history}' --name base_run
          pf run show-details -n base_run
          pf run show-metrics -n base_run
          #pf run visualize -n base_run
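          # check_evaluation_results.py is assumed to parse the run output and exit
          # non-zero when evaluations fail, so the job fails accordingly.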
          python3 check_evaluation_results.py