# Skip to content
#
# chore: update ci
#
# chore: update ci #2
#
# Workflow file for this run

---
# CI workflow: runs the AppFlowy local-AI test suite on a self-hosted macOS
# runner. It pulls a pre-built plugin bundle from S3, downloads two GGUF
# models from Hugging Face, points the dev .env at them, and runs the
# `ci_`-prefixed cargo tests.
name: Run macOS tests

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  run_test:
    runs-on: self-hosted
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Configure AWS credentials
        # v1 of this action runs on a deprecated Node runtime; v4 is the
        # current major and takes the same credential inputs.
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.LOCAL_AI_AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.LOCAL_AI_AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.LOCAL_AI_AWS_REGION }}

      - name: Download artifact from S3
        shell: bash
        run: |
          aws s3 cp s3://appflowy-local-ai-test/macos-latest/AppFlowyLLM_debug.zip AppFlowyLLM_macos-latest_debug.zip
          # -o: overwrite without prompting — a self-hosted runner may still
          # have files from a previous run, and an interactive prompt would
          # hang the job.
          unzip -o AppFlowyLLM_macos-latest_debug.zip -d AppFlowyLLM
          ls AppFlowyLLM

      - name: Download models
        shell: bash
        run: |
          # -f: fail on HTTP errors instead of saving an error page as a
          # .gguf file, which would only surface later as a cryptic
          # model-load failure in the tests.
          curl -fsSL -o tinyllama.gguf "https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q4_0.gguf?download=true"
          curl -fsSL -o all-MiniLM-L12-v2.F16.gguf "https://huggingface.co/leliuga/all-MiniLM-L12-v2-GGUF/resolve/main/all-MiniLM-L12-v2.F16.gguf?download=true"
          ls

      - name: Prepare env
        shell: bash
        # Rewrites dev.env into .env so the tests find the plugin binaries
        # and the downloaded models. `sed -i ''` is the BSD/macOS form of
        # in-place edit (this job only runs on macOS).
        run: |
          ls
          ABS_PATH=$(pwd)
          chat_bin_path=$ABS_PATH/AppFlowyLLM/appflowy_ai_plugin
          embedding_bin_path=$ABS_PATH/AppFlowyLLM/appflowy_embedding_plugin
          cp dev.env .env
          sed -i '' 's|RUST_LOG=.*|RUST_LOG=trace|' .env
          # binary
          sed -i '' "s|CHAT_BIN_PATH=.*|CHAT_BIN_PATH=$chat_bin_path|" .env
          sed -i '' "s|EMBEDDING_BIN_PATH=.*|EMBEDDING_BIN_PATH=$embedding_bin_path|" .env
          # model
          sed -i '' "s|LOCAL_AI_MODEL_DIR=.*|LOCAL_AI_MODEL_DIR=$ABS_PATH|" .env
          sed -i '' 's|LOCAL_AI_CHAT_MODEL_NAME=.*|LOCAL_AI_CHAT_MODEL_NAME=tinyllama.gguf|' .env
          sed -i '' 's|LOCAL_AI_EMBEDDING_MODEL_NAME=.*|LOCAL_AI_EMBEDDING_MODEL_NAME=all-MiniLM-L12-v2.F16.gguf|' .env
          cat .env

      - name: Install Rust toolchain
        # NOTE(review): actions-rs/toolchain is archived/unmaintained.
        # Consider migrating to dtolnay/rust-toolchain — inputs differ, so
        # verify before switching.
        uses: actions-rs/toolchain@v1
        with:
          toolchain: 1.77.2
          override: true
          profile: minimal

      - name: Run tests
        shell: bash
        run: cargo test ci_

      - name: Cleanup downloaded artifacts
        # always(): run cleanup even when a previous step failed — on a
        # self-hosted runner, leftover archives and multi-GB models would
        # otherwise accumulate across runs.
        if: always()
        shell: bash
        run: |
          rm -rf AppFlowyLLM
          rm -f AppFlowyLLM_macos-latest_debug.zip
          rm -f tinyllama.gguf
          rm -f all-MiniLM-L12-v2.F16.gguf