# ci: Add GitHub workflow for nitro node (#65)
name: Build nitro-node | |
on: | |
schedule: | |
- cron: "0 20 * * *" # At 0:20 UTC, which is 7:20 AM UTC+7 | |
push: | |
branches: | |
- main | |
tags: ["v[0-9]+.[0-9]+.[0-9]+"] | |
paths: [".github/workflows/build-nitro-node.yml", "nitro-node"] | |
pull_request: | |
types: [opened, synchronize, reopened] | |
paths: [".github/workflows/build-nitro-node.yml", "nitro-node"] | |
workflow_dispatch: | |
env: | |
LLM_MODEL_URL: https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf | |
WHISPER_MODEL_URL: https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny-q5_1.bin | |
jobs: | |
build: | |
runs-on: ${{ matrix.os }} | |
strategy: | |
matrix: | |
os: | |
- ubuntu-18-04-cuda-12-0 | |
- ubuntu-18-04-cuda-11-7 | |
- macos-silicon | |
- macos-latest | |
- windows-latest | |
- windows-cuda-12-0 | |
- windows-cuda-11-7 | |
steps: | |
- name: Clone | |
id: checkout | |
uses: actions/checkout@v4 | |
with: | |
submodules: recursive | |
- name: Install dependencies | |
if: ${{ startswith(runner.os, 'ubuntu') }} | |
run: | | |
sudo apt-get install gawk bison -y | |
cd `mktemp -d` | |
wget -c https://ftp.gnu.org/gnu/glibc/glibc-2.34.tar.gz | |
tar -zxvf glibc-2.34.tar.gz && cd glibc-2.34 | |
mkdir glibc-build && cd glibc-build | |
../configure --prefix=/opt/glibc-2.34 | |
make | |
sudo make install | |
- uses: actions/setup-node@v4 | |
with: | |
node-version: 18 | |
- name: Install yarn@1 | |
run: | | |
corepack enable | |
corepack install -g yarn@1 | |
- name: Restore cached model file | |
id: cache-model-restore | |
uses: actions/cache/restore@v4 | |
with: | |
path: | | |
nitro-node/test/test_assets/*.gguf | |
key: ${{ runner.os }}-model-gguf | |
- uses: suisei-cn/[email protected] | |
id: download-model-file | |
name: Download model file | |
with: | |
url: "The model we are using is [tinyllama-1.1b](${{ env.LLM_MODEL_URL }})!" | |
target: nitro-node/test/test_assets/ | |
auto-match: true | |
retry-times: 3 | |
- name: Save downloaded model file to cache | |
id: cache-model-save | |
uses: actions/cache/save@v4 | |
with: | |
path: | | |
nitro-node/test/test_assets/*.gguf | |
key: ${{ steps.cache-model-restore.outputs.cache-primary-key }} | |
- name: Run tests | |
id: test_nitro_node | |
run: | | |
cd nitro-node | |
make clean test-ci |