Skip to content

Commit

Permalink
playground tests
Browse files Browse the repository at this point in the history
Signed-off-by: Liora Milbaum <[email protected]>
  • Loading branch information
lmilbaum committed Mar 28, 2024
1 parent bd7eace commit eae323a
Show file tree
Hide file tree
Showing 18 changed files with 264 additions and 41 deletions.
9 changes: 9 additions & 0 deletions .devcontainer/Containerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Development-container image: Podman-in-Podman base plus the Python
# tooling needed to run the repository's container-based test suite.
FROM quay.io/containers/podman:v4.9.3

USER root

COPY ./requirements-dev.txt .

# python3.11 + pip for the pytest tooling; buildah/git for image builds.
# dnf clean all keeps the layer small.
# NOTE(review): pip3 may resolve to the image's default python3, not the
# freshly installed python3.11 — confirm which interpreter gets the deps.
RUN dnf install -y python3.11 python3-pip buildah git && \
dnf clean all && \
pip3 install -r requirements-dev.txt
12 changes: 12 additions & 0 deletions .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"name": "recipes",
"build": {
"dockerfile": "Containerfile",
"context": ".."
},
"privileged": true,
"containerEnv": {
"REGISTRY": "ghcr.io",
"IMAGE_NAME": "ai-lab-recipes/playground"
}
}
36 changes: 2 additions & 34 deletions .github/workflows/build-images.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ jobs:
#GH_REGISTRY: ghcr.io
# github.repository as <account>/<repo>
MODEL_SERVICE_IMAGE: locallm-model-service
CHATBOT_IMAGE: locallm-chatbot
SUMMARIZER_IMAGE: locallm-text-summarizer
CODEGEN_IMAGE: locallm-codegen
RAG_IMAGE: locallm-rag
Expand Down Expand Up @@ -57,13 +56,6 @@ jobs:
files: |
code-generation/**
- name: Get changed chatbot files
id: changed-files-chatbot
uses: tj-actions/changed-files@v42
with:
files: |
chatbot-langchain/**
- name: Get changed chromadb files
id: changed-files-chromadb
uses: tj-actions/changed-files@v42
Expand All @@ -72,14 +64,14 @@ jobs:
rag-langchain/builds/chromadb/**
- name: Install qemu dependency
if: steps.changed-files-model-service.outputs.any_changed == 'true' || steps.changed-files-chatbot.outputs.any_changed == 'true' || steps.changed-files-summarizer.outputs.any_changed == 'true' || steps.changed-files-rag.outputs.any_changed == 'true' || steps.changed-files-codegen.outputs.any_changed == 'true'
if: steps.changed-files-model-service.outputs.any_changed == 'true' || steps.changed-files-summarizer.outputs.any_changed == 'true' || steps.changed-files-rag.outputs.any_changed == 'true' || steps.changed-files-codegen.outputs.any_changed == 'true'
run: |
sudo apt-get update
sudo apt-get install -y qemu-user-static
- name: Login Quay Container registry
if: >
(steps.changed-files-model-service.outputs.any_changed == 'true' || steps.changed-files-chatbot.outputs.any_changed == 'true' || steps.changed-files-summarizer.outputs.any_changed == 'true' || steps.changed-files-rag.outputs.any_changed == 'true' || steps.changed-files-chromadb.outputs.any_changed == 'true' || steps.changed-files-codegen.outputs.any_changed == 'true') &&
(steps.changed-files-model-service.outputs.any_changed == 'true' || steps.changed-files-summarizer.outputs.any_changed == 'true' || steps.changed-files-rag.outputs.any_changed == 'true' || steps.changed-files-chromadb.outputs.any_changed == 'true' || steps.changed-files-codegen.outputs.any_changed == 'true') &&
(github.event_name == 'push' || github.event_name == 'schedule') &&
(github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v'))
uses: redhat-actions/podman-login@v1
Expand Down Expand Up @@ -111,30 +103,6 @@ jobs:
tags: ${{ steps.build_model_service_multiplatform.outputs.tags }}
registry: ${{ env.REGISTRY }}

- name: Build chatbot
id: build_chatbot_multiplatform
uses: redhat-actions/buildah-build@v2
if: steps.changed-files-chatbot.outputs.any_changed == 'true'
with:
image: ${{ env.CHATBOT_IMAGE }}
tags: latest ${{ github.sha }}
platforms: linux/amd64, linux/arm64
context: chatbot-langchain
containerfiles: |
./chatbot-langchain/builds/Containerfile
- name: Push chatbot image
id: push_chatbot
if: >
(steps.changed-files-chatbot.outputs.any_changed == 'true') &&
(github.event_name == 'push' || github.event_name == 'schedule') &&
(github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v'))
uses: redhat-actions/push-to-registry@v2
with:
image: ${{ steps.build_chatbot_multiplatform.outputs.image }}
tags: ${{ steps.build_chatbot_multiplatform.outputs.tags }}
registry: ${{ env.REGISTRY }}

- name: Build code-generation
if: steps.changed-files-codegen.outputs.any_changed == 'true'
id: build_codegen_multiplatform
Expand Down
48 changes: 48 additions & 0 deletions .github/workflows/chatbot-langchain.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
name: chatbot-langchain

on:
  pull_request:
    branches:
      - main
  push:
    branches:
      - main

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository_owner }}/chatbot-langchain

jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    # Local registry service available to the job on localhost:5000.
    services:
      registry:
        image: registry:2.8.3
        ports:
          - 5000:5000
    steps:
      # NOTE(review): the original action references were mangled into
      # "[email protected]" by the page's email obfuscation; restored to
      # current major versions — confirm the exact pins used by the repo.
      - uses: actions/checkout@v4

      - name: Login to ghcr
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Build the chatbot image; the tests read it from container storage.
      - name: Buildah Action
        uses: redhat-actions/buildah-build@v2
        with:
          image: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: latest
          containerfiles: ./chatbot-langchain/builds/Containerfile
          context: chatbot-langchain

      - name: Set up Python
        uses: actions/setup-python@v5

      - name: Run tests
        run: make -f chatbot-langchain/Makefile test
48 changes: 48 additions & 0 deletions .github/workflows/playground.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
name: playground

on:
  pull_request:
    branches:
      - main
  push:
    branches:
      - main

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository_owner }}/playground

jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    # Local registry service available to the job on localhost:5000.
    services:
      registry:
        image: registry:2.8.3
        ports:
          - 5000:5000
    steps:
      # NOTE(review): the original action references were mangled into
      # "[email protected]" by the page's email obfuscation; restored to
      # current major versions — confirm the exact pins used by the repo.
      - uses: actions/checkout@v4

      - name: Login to ghcr
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Build the model-service image; the tests read it from container storage.
      - name: Buildah Action
        uses: redhat-actions/buildah-build@v2
        with:
          image: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: latest
          containerfiles: ./playground/Containerfile
          context: playground

      - name: Set up Python
        uses: actions/setup-python@v5

      - name: Run tests
        run: make -f playground/Makefile test
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
*.gguf
*.bin
*_pycache_*
port_check.lock
18 changes: 18 additions & 0 deletions chatbot-langchain/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Build the chatbot-langchain UI image from the repo root.
# Fixed: the original was a copy-paste of playground/Makefile — it built
# playground's Containerfile/context and tagged the image as .../playground.
.PHONY: build
build:
	podman build -f chatbot-langchain/builds/Containerfile -t ghcr.io/ai-lab-recipes/chatbot-langchain --format docker chatbot-langchain

# Fetch the chat model (consumed by the companion model service) only when
# the upstream copy is newer (-z); download to a temp file so a failed
# transfer never leaves a truncated model behind.
models/llama-2-7b-chat.Q5_K_S.gguf:
	curl -s -S -L -f https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_S.gguf -z $@ -o $@.tmp && mv -f $@.tmp $@ 2>/dev/null || rm -f $@.tmp $@

# Install this recipe's Python test dependencies.
# Fixed: previously installed playground/tests/requirements.txt.
.PHONY: download
download:
	pip install -r chatbot-langchain/tests/requirements.txt

# Run the chatbot detached; it serves on 8501 and talks to a model service
# endpoint (see tests/conftest.py). Fixed: 'run' depended on a non-existent
# 'install' target and launched the playground model-service image.
.PHONY: run
run: download models/llama-2-7b-chat.Q5_K_S.gguf
	podman run -it -d -p 8501:8501 -e MODEL_SERVICE_ENDPOINT=http://10.88.0.1:8001/v1 --net=host ghcr.io/ai-lab-recipes/chatbot-langchain

.PHONY: test
test: models/llama-2-7b-chat.Q5_K_S.gguf download
	pytest --log-cli-level NOTSET
Empty file.
23 changes: 23 additions & 0 deletions chatbot-langchain/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import pytest_container
import os


# Chatbot UI container under test, read from local container storage.
# REGISTRY and IMAGE_NAME must be set in the environment (the devcontainer
# exports them); a missing variable raises KeyError at import time.
CL = pytest_container.Container(
    url=f"containers-storage:{os.environ['REGISTRY']}/{os.environ['IMAGE_NAME']}",
    # Points the chatbot at a model service on the host; 10.88.0.1 is
    # presumably the default podman bridge gateway — TODO confirm.
    extra_environment_variables={
        "MODEL_SERVICE_ENDPOINT": "http://10.88.0.1:8001/v1"
    },
    # Expose the Streamlit-style UI port 8501 on the host.
    # NOTE(review): with --net=host the port forwarding may be redundant —
    # verify against pytest_container's launch behavior.
    forwarded_ports=[
        pytest_container.PortForwarding(
            container_port=8501,
            host_port=8501
        )
    ],
    extra_launch_args=["--net=host"]
)


def pytest_generate_tests(metafunc):
    # Parametrize tests over the containers listed in CONTAINER_IMAGES.
    pytest_container.auto_container_parametrize(metafunc)


def pytest_addoption(parser):
    # Add pytest_container's logging-level command-line options.
    pytest_container.add_logging_level_options(parser)
6 changes: 6 additions & 0 deletions chatbot-langchain/tests/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
pip==24.0
pytest-container==0.4.0
pytest-testinfra==10.1.0
pytest==8.1.1
requests==2.31.0
tenacity==8.2.3
5 changes: 2 additions & 3 deletions playground/Containerfile
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
FROM registry.access.redhat.com/ubi9/python-39:latest
FROM registry.access.redhat.com/ubi9/python-311:1-52
WORKDIR /locallm
COPY requirements.txt /locallm/requirements.txt
RUN pip install --upgrade pip
RUN pip install --no-cache-dir --upgrade -r /locallm/requirements.txt
RUN pip install --no-cache-dir --verbose -r /locallm/requirements.txt
COPY run.sh run.sh
EXPOSE 8001
ENTRYPOINT [ "sh", "run.sh" ]
18 changes: 18 additions & 0 deletions playground/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Build the playground model-service image from the repo root.
.PHONY: build
build:
	podman build -f playground/Containerfile -t ghcr.io/ai-lab-recipes/playground --format docker playground

# Fetch the chat model only when the upstream copy is newer (-z); download
# to a temp file so a failed transfer never leaves a truncated model behind.
models/llama-2-7b-chat.Q5_K_S.gguf:
	curl -s -S -L -f https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_S.gguf -z $@ -o $@.tmp && mv -f $@.tmp $@ 2>/dev/null || rm -f $@.tmp $@

# Install the Python test dependencies.
.PHONY: download
download:
	pip install -r requirements-dev.txt

# Run the model service detached on port 8001 with the model mounted
# read-only. Fixed: 'run' depended on a non-existent 'install' target and
# launched ghcr.io/redhat-et/playground, a different registry path than
# the image 'build' produces.
.PHONY: run
run: download models/llama-2-7b-chat.Q5_K_S.gguf
	podman run -it -d -p 8001:8001 -v ./models:/locallm/models:ro,Z -e MODEL_PATH=models/llama-2-7b-chat.Q5_K_S.gguf -e HOST=0.0.0.0 -e PORT=8001 --net=host ghcr.io/ai-lab-recipes/playground

.PHONY: test
test: models/llama-2-7b-chat.Q5_K_S.gguf download
	pytest --log-cli-level NOTSET
27 changes: 24 additions & 3 deletions playground/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,11 @@
From this directory,

```bash
podman build -t playground:image .
podman build -t playground .
```
or
```bash
make -f Makefile build
```

### Download Model
Expand All @@ -20,6 +24,10 @@ cd ../models
wget <Download URL>
cd ../
```
or
```bash
make -f Makefile download
```

### Deploy Model Service

Expand All @@ -34,7 +42,11 @@ podman run --rm -it -d \
-e MODEL_PATH=models/<model-filename> \
-e HOST=0.0.0.0 \
-e PORT=8001 \
playground:image`
playground`
```
or
```bash
make -f Makefile run
```

#### Multiple Model Service:
Expand Down Expand Up @@ -68,5 +80,14 @@ podman run --rm -it -d \
-p 8001:8001 \
-v Local/path/to/locallm/models:/locallm/models:ro,Z \
-e CONFIG_PATH=models/<config-filename> \
playground:image
playground
```

### DEV environment

The environment is implemented with devcontainer technology.

Running tests
```bash
make -f Makefile test
```
3 changes: 2 additions & 1 deletion playground/requirements.txt
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
llama-cpp-python[server]
llama-cpp-python[server]==0.2.57
pip==24.0
Empty file added playground/tests/__init__.py
Empty file.
32 changes: 32 additions & 0 deletions playground/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import pytest_container
import os


# Model-service container under test, read from local container storage.
# REGISTRY and IMAGE_NAME must be set in the environment (the devcontainer
# exports them); a missing variable raises KeyError at import time.
MS = pytest_container.Container(
    url=f"containers-storage:{os.environ['REGISTRY']}/{os.environ['IMAGE_NAME']}",
    # Mount the downloaded model directory read-only into the container.
    volume_mounts=[
        pytest_container.container.BindMount(
            container_path="/locallm/models",
            host_path="./models",
            flags=["ro"]
        )
    ],
    # Same model/host/port settings the Makefile 'run' target uses.
    extra_environment_variables={
        "MODEL_PATH": "models/llama-2-7b-chat.Q5_K_S.gguf",
        "HOST": "0.0.0.0",
        "PORT": "8001"
    },
    # NOTE(review): with --net=host the port forwarding may be redundant —
    # verify against pytest_container's launch behavior.
    forwarded_ports=[
        pytest_container.PortForwarding(
            container_port=8001,
            host_port=8001
        )
    ],
    extra_launch_args=["--net=host"]
)


def pytest_generate_tests(metafunc):
    # Parametrize tests over the containers listed in CONTAINER_IMAGES.
    pytest_container.auto_container_parametrize(metafunc)


def pytest_addoption(parser):
    # Add pytest_container's logging-level command-line options.
    pytest_container.add_logging_level_options(parser)
13 changes: 13 additions & 0 deletions playground/tests/test_alive.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import pytest_container
from .conftest import MS
import tenacity

# pytest_container discovers this list for auto_container parametrization.
CONTAINER_IMAGES = [MS]


def test_etc_os_release_present(auto_container: pytest_container.container.ContainerData):
    # Sanity check that the container filesystem looks like a Linux distro image.
    assert auto_container.connection.file("/etc/os-release").exists

# Retried because the model server may still be loading when the container
# first reports running: up to 5 attempts with exponential backoff.
@tenacity.retry(stop=tenacity.stop_after_attempt(5), wait=tenacity.wait_exponential())
def test_alive(auto_container: pytest_container.container.ContainerData, host):
    # run_expect([0], ...) fails on a non-zero curl exit code; the stripped
    # stdout is discarded — the exit status alone is the liveness check.
    host.run_expect([0],f"curl http://localhost:{auto_container.forwarded_ports[0].host_port}",).stdout.strip()
6 changes: 6 additions & 0 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
pip==24.0
pytest-container==0.4.0
pytest-testinfra==10.1.0
pytest==8.1.1
requests==2.31.0
tenacity==8.2.3

0 comments on commit eae323a

Please sign in to comment.