Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

playground tests #96

Merged
merged 3 commits into from
Mar 28, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
playground tests
Signed-off-by: Liora Milbaum <lmilbaum@redhat.com>
  • Loading branch information
lmilbaum authored and sallyom committed Mar 28, 2024
commit 6e1904934bf87f4e0505b1db4db9cb0dcb13062e
9 changes: 9 additions & 0 deletions .devcontainer/Containerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Dev-container image: Podman-in-container base with the playground test
# dependencies preinstalled.
FROM quay.io/containers/podman:v4.9.3

USER root

# Test requirements, copied from the repo root (the devcontainer build context
# is ".." per devcontainer.json).
COPY playground/tests/requirements.txt .

# NOTE(review): python3.11 is installed explicitly, but python3-pip/pip3 pair
# with the distro default python3 — confirm the tests actually run under 3.11.
RUN dnf install -y python3.11 python3-pip buildah git && \
    dnf clean all && \
    pip3 install -r requirements.txt
12 changes: 12 additions & 0 deletions .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"name": "recipes",
"build": {
"dockerfile": "Containerfile",
"context": ".."
},
"privileged": true,
"containerEnv": {
"REGISTRY": "ghcr.io",
"IMAGE_NAME": "ai-lab-recipes/playground"
}
}
48 changes: 48 additions & 0 deletions .github/workflows/playground.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# CI for the playground image: build the container and run its test suite on
# pull requests and pushes to main.
name: playground

on:
  pull_request:
    branches:
      - main
  push:
    branches:
      - main

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository_owner }}/playground

jobs:
  # NOTE(review): despite the name, no step below pushes the image — confirm
  # whether a push step (and the local registry service) is still intended.
  build-and-push-image:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    services:
      registry:
        image: registry:2.8.3
        ports:
          # Quoted: digit-and-colon scalars are sexagesimal integers under
          # YAML 1.1 parsers; port mappings should always be strings.
          - "5000:5000"
    steps:
      - uses: actions/checkout@v4.1.1

      - name: Login to ghcr
        uses: docker/login-action@v3.1.0
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Buildah Action
        uses: redhat-actions/buildah-build@v2.13
        with:
          image: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: latest
          containerfiles: ./playground/Containerfile
          context: playground

      # NOTE(review): no python-version is pinned, so the runner default is
      # used — consider `with: python-version: "3.11"` to match the
      # devcontainer image.
      - name: Set up Python
        uses: actions/setup-python@v5.0.0

      - name: Run tests
        run: make -f playground/Makefile test
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
*.gguf
*.bin
*_pycache_*
port_check.lock
27 changes: 24 additions & 3 deletions model_servers/llamacpp_python/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,11 @@
From this directory,

```bash
podman build -t playground:image .
podman build -t playground .
```
or
```bash
make -f Makefile build
```

### Download Model
Expand All @@ -20,6 +24,10 @@ cd ../models
wget <Download URL>
cd ../
```
or
```bash
make -f Makefile download
```

### Deploy Model Service

Expand All @@ -34,7 +42,11 @@ podman run --rm -it -d \
-e MODEL_PATH=models/<model-filename> \
-e HOST=0.0.0.0 \
-e PORT=8001 \
playground:image`
playground
```
or
```bash
make -f Makefile run
```

#### Multiple Model Service:
Expand Down Expand Up @@ -68,5 +80,14 @@ podman run --rm -it -d \
-p 8001:8001 \
-v Local/path/to/locallm/models:/locallm/models:ro,Z \
-e CONFIG_PATH=models/<config-filename> \
playground:image
playground
```

### DEV environment

The environment is implemented with devcontainer technology.

Running tests
```bash
make -f Makefile test
```
9 changes: 5 additions & 4 deletions model_servers/llamacpp_python/base/Containerfile
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
# NOTE(review): this span is a rendered diff — it interleaves the OLD and NEW
# versions of several lines (two FROM lines, two pip-install variants, two
# ENTRYPOINT lines; note the first RUN references requirements.txt before its
# COPY). Only one line of each pair belongs in the final file.
FROM registry.access.redhat.com/ubi9/python-39:latest
FROM registry.access.redhat.com/ubi9/python-311:1-52
WORKDIR /locallm
COPY src .
RUN pip install --upgrade pip
RUN pip install --no-cache-dir --upgrade -r /locallm/requirements.txt
COPY requirements.txt /locallm/requirements.txt
RUN pip install --no-cache-dir --verbose -r /locallm/requirements.txt
COPY run.sh run.sh
EXPOSE 8001
ENTRYPOINT [ "sh", "run.sh" ]
ENTRYPOINT [ "sh", "run.sh" ]
18 changes: 18 additions & 0 deletions model_servers/llamacpp_python/base/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Build, run, and test the playground model-server image.
# NOTE(review): CI invokes this as `make -f playground/Makefile test`, so
# paths are relative to make's working directory — confirm against the layout.

# Single source of truth for the image tag; `run` previously used
# ghcr.io/redhat-et/playground while `build` tagged ghcr.io/ai-lab-recipes/playground.
IMAGE ?= ghcr.io/ai-lab-recipes/playground
MODEL := models/llama-2-7b-chat.Q5_K_S.gguf

.PHONY: build
build:
	podman build -f Containerfile -t $(IMAGE) --format docker playground

# Fetch the model only if missing or stale (-z compares timestamps); on
# failure, remove the partial .tmp and target so no corrupt file is left.
$(MODEL):
	curl -s -S -L -f https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_S.gguf -z $@ -o $@.tmp && mv -f $@.tmp $@ 2>/dev/null || rm -f $@.tmp $@

# Installs the Python test dependencies (the model file itself is fetched by
# the $(MODEL) rule above — the target name is historical).
.PHONY: download
download:
	pip install -r tests/requirements.txt

# Fixed: the original prerequisite was the undefined target `install`, which
# made `make run` fail with "No rule to make target 'install'".
.PHONY: run
run: download $(MODEL)
	podman run -it -d -p 8001:8001 -v ./models:/locallm/models:ro,Z -e MODEL_PATH=$(MODEL) -e HOST=0.0.0.0 -e PORT=8001 --net=host $(IMAGE)

.PHONY: test
test: $(MODEL) download
	pytest --log-cli-level NOTSET
Empty file.
32 changes: 32 additions & 0 deletions model_servers/llamacpp_python/base/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import pytest_container
import os


# Container under test: the playground model-server image, resolved from local
# container storage as REGISTRY/IMAGE_NAME.
# NOTE(review): os.environ[...] raises KeyError if REGISTRY or IMAGE_NAME is
# unset — confirm both are always exported (the devcontainer sets them).
MS = pytest_container.Container(
    url=f"containers-storage:{os.environ['REGISTRY']}/{os.environ['IMAGE_NAME']}",
    # Mount the locally downloaded models read-only into the container.
    volume_mounts=[
        pytest_container.container.BindMount(
            container_path="/locallm/models",
            host_path="./models",
            flags=["ro"]
        )
    ],
    # Environment consumed by the model server's run.sh.
    extra_environment_variables={
        "MODEL_PATH": "models/llama-2-7b-chat.Q5_K_S.gguf",
        "HOST": "0.0.0.0",
        "PORT": "8001"
    },
    forwarded_ports=[
        pytest_container.PortForwarding(
            container_port=8001,
            host_port=8001
        )
    ],
    # NOTE(review): --net=host alongside an explicit port forward looks
    # redundant — confirm which one the tests actually rely on.
    extra_launch_args=["--net=host"]
)

def pytest_generate_tests(metafunc):
    # Parametrize tests over each module's CONTAINER_IMAGES list
    # (pytest_container convention).
    pytest_container.auto_container_parametrize(metafunc)

def pytest_addoption(parser):
    # Expose pytest_container's --log-level-style command-line options.
    pytest_container.add_logging_level_options(parser)
6 changes: 6 additions & 0 deletions model_servers/llamacpp_python/base/tests/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
pip==24.0
pytest-container==0.4.0
pytest-testinfra==10.1.0
pytest==8.1.1
requests==2.31.0
tenacity==8.2.3
13 changes: 13 additions & 0 deletions model_servers/llamacpp_python/base/tests/test_alive.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import pytest_container
from .conftest import MS
import tenacity

# Images the auto_container fixture parametrizes over (see conftest).
CONTAINER_IMAGES = [MS]


def test_etc_os_release_present(auto_container: pytest_container.container.ContainerData):
    # Sanity check that the container is a real Linux image.
    assert auto_container.connection.file("/etc/os-release").exists

# Retry with exponential backoff: the model server may still be starting when
# the container first comes up.
@tenacity.retry(stop=tenacity.stop_after_attempt(5), wait=tenacity.wait_exponential())
def test_alive(auto_container: pytest_container.container.ContainerData, host):
    # --fail makes curl exit non-zero on HTTP >= 400; the original command
    # returned 0 even when the server answered with an error page, and its
    # .stdout.strip() result was discarded, so the test asserted nothing.
    host.run_expect(
        [0],
        f"curl --fail http://localhost:{auto_container.forwarded_ports[0].host_port}",
    )
3 changes: 2 additions & 1 deletion model_servers/llamacpp_python/src/requirements.txt
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
llama-cpp-python[server]
llama-cpp-python[server]==0.2.57
pip==24.0