# e2e_tests.yml
name: End-to-End Tests
#on: [push, pull_request]
on: [push]
#on:
#  pull_request_target:
#    types: [labeled]
jobs:
  test:
    runs-on: ubuntu-latest
    environment: "GitHub Actions 1"
    env:
      OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
      ASSISTANTS_API_KEY: ${{ secrets.OPENAI_API_KEY }}
      ASSISTANTS_API_TYPE: ${{ secrets.ASSISTANTS_API_TYPE }}
      ASSISTANTS_ID: ${{ secrets.ASSISTANTS_ID }}
      ASSISTANTS_BASE_URL: ${{ secrets.ASSISTANTS_BASE_URL }}
      ASSISTANTS_MODEL: ${{ secrets.ASSISTANTS_MODEL }}
      ASSISTANTS_BOT_NAME: ${{ secrets.ASSISTANTS_BOT_NAME }}
      POSTGRES_DATA_HOST: ${{ secrets.POSTGRES_DATA_HOST }}
      POSTGRES_DATA_PORT: ${{ secrets.POSTGRES_DATA_PORT }}
      POSTGRES_DATA_DB: ${{ secrets.POSTGRES_DATA_DB }}
      POSTGRES_DATA_USER: ${{ secrets.POSTGRES_DATA_USER }}
      POSTGRES_DATA_PASSWORD: ${{ secrets.POSTGRES_DATA_PASSWORD }}
      POSTGRES_RECIPE_HOST: ${{ secrets.POSTGRES_RECIPE_HOST }}
      POSTGRES_RECIPE_PORT: ${{ secrets.POSTGRES_RECIPE_PORT }}
      POSTGRES_RECIPE_DB: ${{ secrets.POSTGRES_RECIPE_DB }}
      POSTGRES_RECIPE_USER: ${{ secrets.POSTGRES_RECIPE_USER }}
      POSTGRES_RECIPE_PASSWORD: ${{ secrets.POSTGRES_RECIPE_PASSWORD }}
      RECIPES_OPENAI_API_TYPE: ${{ secrets.RECIPES_OPENAI_API_TYPE }}
      RECIPES_OPENAI_API_KEY: ${{ secrets.RECIPES_OPENAI_API_KEY }}
      RECIPES_MODEL: ${{ secrets.RECIPES_MODEL }}
      RECIPES_OPENAI_TEXT_COMPLETION_DEPLOYMENT_NAME: ${{ secrets.RECIPES_OPENAI_TEXT_COMPLETION_DEPLOYMENT_NAME }}
      RECIPES_MEMORY_SIMILARITY_CUTOFF: ${{ secrets.RECIPES_MEMORY_SIMILARITY_CUTOFF }}
      RECIPES_RECIPE_SIMILARITY_CUTOFF: ${{ secrets.RECIPES_RECIPE_SIMILARITY_CUTOFF }}
      RECIPES_HELPER_FUNCTION_SIMILARITY_CUTOFF: ${{ secrets.RECIPES_HELPER_FUNCTION_SIMILARITY_CUTOFF }}
      RECIPES_MODEL_TEMP: ${{ secrets.RECIPES_MODEL_TEMP }}
      RECIPES_MODEL_MAX_TOKENS: ${{ secrets.RECIPES_MODEL_MAX_TOKENS }}
      IMAGE_HOST: ${{ secrets.IMAGE_HOST }}
      RECIPE_SERVER_API: ${{ secrets.RECIPE_SERVER_API }}
      CHAINLIT_AUTH_SECRET: ${{ secrets.CHAINLIT_AUTH_SECRET }}
      USER_LOGIN: ${{ secrets.USER_LOGIN }}
      USER_PASSWORD: ${{ secrets.USER_PASSWORD }}
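    # The variables above are dumped to a .env file by the "Build images" step below;
    # docker-compose-github.yml is assumed to pick them up from there when the
    # services are started.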
    #services:
    #  datadb:
    #    image: postgis/postgis:12-3.4
    #    env:
    #      POSTGRES_DB: ${{ secrets.POSTGRES_DATA_DB }}
    #      POSTGRES_USER: ${{ secrets.POSTGRES_DATA_USER }}
    #      POSTGRES_PASSWORD: ${{ secrets.POSTGRES_DATA_PASSWORD }}
    #    ports:
    #      - 5433:5432
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      #- name: Checkout integration tests data
      #  uses: actions/checkout@master
      #  with:
      #    repository: datakind/recipes-ai-test-data
      #    ssh-key: ${{ secrets.SSH_PRIVATE_KEY }}
      #    path: recipes-ai-test-data
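
      # ghaction-github-runtime exposes the runner's ACTIONS_RUNTIME_TOKEN and
      # ACTIONS_CACHE_URL to subsequent commands, presumably so the commented-out
      # buildx "type=gha" cache settings below could work if re-enabled.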
      - name: Expose GitHub Runtime
        uses: crazy-max/ghaction-github-runtime@v2
      - name: Build images
        run: |
          env > .env
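          # Assemble single connection-string variables from the individual Postgres
          # settings above for use by commands later in this step.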
          export DATA_DB_CONN_STRING="postgresql://${POSTGRES_DATA_USER}:${POSTGRES_DATA_PASSWORD}@${POSTGRES_DATA_HOST}:${POSTGRES_DATA_PORT}/${POSTGRES_DATA_DB}"
          export POSTGRES_RECIPE_CONN_STRING="postgresql://${POSTGRES_RECIPE_USER}:${POSTGRES_RECIPE_PASSWORD}@${POSTGRES_RECIPE_HOST}:${POSTGRES_RECIPE_PORT}/${POSTGRES_RECIPE_DB}"
          #docker buildx create --use --driver=docker-container
          #docker buildx bake -f ./docker-compose-dev.yml --set *.cache-to="type=gha,mode=max" --set *.cache-from="type=gha" --load --set *.platform=linux/amd64 --metadata-file metadata.json
          #cat metadata.json
          #docker images
      - name: Spin up services
        run: |
          # TODO: this should be enhanced to use buildx bake to leverage layer caching for faster builds,
          # or to push the images to a registry and simply pull them here for the run.
          # TODO: the docker-compose files should be refactored to use scopes instead of a separate version per environment.
          docker-compose -f ./docker-compose-github.yml pull
          docker-compose -f ./docker-compose-github.yml up -d
          echo "logs datadb ..."
          docker logs datadb
          #docker-compose -f docker-compose-github.yml logs datadb
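
      # Sketch (not enabled) of the layer-caching approach mentioned in the TODO above,
      # modelled on the commented-out buildx/bake commands in "Build images"; whether
      # docker-compose-github.yml defines suitable bake targets is an assumption.
      #- name: Build images with GHA layer caching (sketch)
      #  run: |
      #    docker buildx create --use --driver=docker-container
      #    docker buildx bake -f ./docker-compose-github.yml --set *.cache-to="type=gha,mode=max" --set *.cache-from="type=gha" --load --set *.platform=linux/amd64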
      # - name: Run the stack
      #   run: |
      #     export DATA_DB_CONN_STRING="postgresql://${POSTGRES_DATA_USER}:${POSTGRES_DATA_PASSWORD}@${POSTGRES_DATA_HOST}:${POSTGRES_DATA_PORT}/${POSTGRES_DATA_DB}"
      #     export POSTGRES_RECIPE_CONN_STRING="postgresql://${POSTGRES_RECIPE_USER}:${POSTGRES_RECIPE_PASSWORD}@${POSTGRES_RECIPE_HOST}:${POSTGRES_RECIPE_PORT}/${POSTGRES_RECIPE_DB}"
      #     env > .env
      #     #docker compose -f docker-compose.yml -f docker-compose-dev.yml up -d recipedb datadb server promptflow
      #     docker ps

      # - name: End-to-end tests using Promptflow and the chainlit code
      #   run: |
      #     set -e
      #     docker ps
      #     echo "logs backend ..."
      #     docker-compose -f docker-compose.yml -f docker-compose-dev.yml logs server
      #     echo "logs datadb ..."
      #     docker-compose -f docker-compose.yml -f docker-compose-dev.yml logs datadb
      #     echo "logs recipesdb ..."
      #     docker-compose -f docker-compose.yml -f docker-compose-dev.yml logs recipedb
      #     echo "Tests ..."
      #     docker compose -f docker-compose.yml -f docker-compose-dev.yml exec --workdir /app/chainlit-ui-evaluation promptflow pf run create --flow . --data ./data.jsonl --stream --column-mapping query='${data.query}' context='${data.context}' chat_history='${data.chat_history}' --name base_run
      #     docker compose -f docker-compose.yml -f docker-compose-dev.yml exec --workdir /app/chainlit-ui-evaluation promptflow pf run show-details -n base_run
      #     docker compose -f docker-compose.yml -f docker-compose-dev.yml exec --workdir /app/chainlit-ui-evaluation promptflow pf run show-metrics -n base_run
      #     #docker compose -f docker-compose.yml -f docker-compose-dev.yml exec --workdir /app/chainlit-ui-evaluation promptflow pf run visualize -n base_run
      #     docker compose -f docker-compose.yml -f docker-compose-dev.yml exec --workdir /app/chainlit-ui-evaluation promptflow python3 check_evaluation_results.py
      #     docker-compose -f docker-stack.yml -f docker-compose-dev.yml down -v --remove-orphans
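      # Note (assumption): the teardown line above references docker-stack.yml while the
      # rest of this block uses docker-compose.yml; if this block is re-enabled, those file
      # names likely need to be aligned so "down" stops the same stack that was started.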