playground tests
Signed-off-by: Liora Milbaum <[email protected]>
lmilbaum committed Mar 27, 2024
1 parent 00cd386 commit 645d10d
Showing 11 changed files with 143 additions and 4 deletions.
9 changes: 9 additions & 0 deletions .devcontainer/Containerfile
@@ -0,0 +1,9 @@
FROM quay.io/containers/podman:v4.9.3

USER root

COPY playground/tests/requirements.txt .

RUN dnf install -y python3.11 python3-pip buildah git && \
    dnf clean all && \
    pip3 install -r requirements.txt
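
As an aside (not part of this diff), a minimal Python sanity-check sketch for the resulting devcontainer: it assumes REGISTRY and IMAGE_NAME are injected by devcontainer.json below, that podman comes from the base image, and that buildah and git come from the dnf line above. The file name check_devcontainer.py is hypothetical.

# check_devcontainer.py -- illustrative only, not part of this commit.
# Verifies the environment the playground tests assume inside the devcontainer.
import os
import shutil

for var in ("REGISTRY", "IMAGE_NAME"):
    # Set via "containerEnv" in .devcontainer/devcontainer.json
    assert os.environ.get(var), f"{var} is not set"

for tool in ("podman", "buildah", "git"):
    # podman ships with the base image; buildah and git are installed by dnf
    assert shutil.which(tool), f"{tool} is not on PATH"

print("devcontainer environment looks ready for the playground tests")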
12 changes: 12 additions & 0 deletions .devcontainer/devcontainer.json
@@ -0,0 +1,12 @@
{
    "name": "recepies",
    "build": {
        "dockerfile": "Containerfile",
        "context": ".."
    },
    "privileged": true,
    "containerEnv": {
        "REGISTRY": "ghcr.io",
        "IMAGE_NAME": "ai-lab-recipes/playground"
    }
}
48 changes: 48 additions & 0 deletions .github/workflows/playground.yaml
@@ -0,0 +1,48 @@
name: playground

on:
  pull_request:
    branches:
      - main
  push:
    branches:
      - main

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository_owner }}/playground

jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    services:
      registry:
        image: registry:2.8.3
        ports:
          - 5000:5000
    steps:
      - uses: actions/[email protected]

      - name: Login to ghcr
        uses: docker/[email protected]
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Buildah Action
        uses: redhat-actions/[email protected]
        with:
          image: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: latest
          containerfiles: ./playground/Containerfile
          context: playground

      - name: Set up Python
        uses: actions/[email protected]

      - name: Run tests
        run: make -f playground/Makefile test
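
For illustration only (not part of the workflow), a small Python sketch of how the env block above turns into the image reference the test suite resolves; the owner value is a placeholder, since github.repository_owner is only known at run time.

# Illustrative sketch, not part of this commit.
import os

os.environ.setdefault("REGISTRY", "ghcr.io")
os.environ.setdefault("IMAGE_NAME", "example-owner/playground")  # placeholder owner

# conftest.py builds exactly this reference and reads the image from local
# container storage, which is where the Buildah build step should leave it.
image_ref = f"containers-storage:{os.environ['REGISTRY']}/{os.environ['IMAGE_NAME']}"
print(image_ref)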
1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
 *.gguf
 *.bin
 *_pycache_*
+port_check.lock
5 changes: 2 additions & 3 deletions playground/Containerfile
@@ -1,8 +1,7 @@
-FROM registry.access.redhat.com/ubi9/python-39:latest
+FROM registry.access.redhat.com/ubi9/python-311:1-52
 WORKDIR /locallm
 COPY requirements.txt /locallm/requirements.txt
-RUN pip install --upgrade pip
-RUN pip install --no-cache-dir --upgrade -r /locallm/requirements.txt
+RUN pip install --no-cache-dir --verbose -r /locallm/requirements.txt
 COPY run.sh run.sh
 EXPOSE 8001
 ENTRYPOINT [ "sh", "run.sh" ]
19 changes: 19 additions & 0 deletions playground/Makefile
@@ -0,0 +1,19 @@
.PHONY: build
build:
	podman build -f playground/Containerfile -t ghcr.io/ai-lab-recipes/playground --format docker playground

models/llama-2-7b-chat.Q5_K_S.gguf:
	# wget -P models https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_S.gguf -o /dev/null
	curl -s -S -L -f https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_S.gguf -z $@ -o $@.tmp && mv -f $@.tmp $@ 2>/dev/null || rm -f $@.tmp $@

.PHONY: install
install:
	pip install -r playground/tests/requirements.txt

.PHONY: run
run: install models/llama-2-7b-chat.Q5_K_S.gguf
	podman run -it -d -p 8001:8001 -v ./models:/locallm/models:ro,Z -e MODEL_PATH=models/llama-2-7b-chat.Q5_K_S.gguf -e HOST=0.0.0.0 -e PORT=8001 --net=host ghcr.io/redhat-et/playground

.PHONY: test
test: models/llama-2-7b-chat.Q5_K_S.gguf install
	pytest --log-cli-level NOTSET
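
As a rough Python equivalent of the model-download recipe above (illustrative only, not part of this commit): fetch the GGUF file once and skip the download when it is already present, matching Make's behaviour of not rebuilding an existing target.

# Illustrative sketch, not part of this commit.
import os
import urllib.request

MODEL_URL = ("https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/"
             "resolve/main/llama-2-7b-chat.Q5_K_S.gguf")
MODEL_PATH = "models/llama-2-7b-chat.Q5_K_S.gguf"

if not os.path.exists(MODEL_PATH):
    os.makedirs("models", exist_ok=True)
    tmp_path = MODEL_PATH + ".tmp"
    urllib.request.urlretrieve(MODEL_URL, tmp_path)  # download to a temp file first
    os.replace(tmp_path, MODEL_PATH)                 # rename into place, like the mv -f in the Makefile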
3 changes: 2 additions & 1 deletion playground/requirements.txt
@@ -1 +1,2 @@
-llama-cpp-python[server]
+llama-cpp-python[server]==0.2.57
+pip==24.0
Empty file added playground/tests/__init__.py
32 changes: 32 additions & 0 deletions playground/tests/conftest.py
@@ -0,0 +1,32 @@
import pytest_container
import os


TW = pytest_container.Container(
    url=f"containers-storage:{os.environ['REGISTRY']}/{os.environ['IMAGE_NAME']}",
    volume_mounts=[
        pytest_container.container.BindMount(
            container_path="/locallm/models",
            host_path="./models",
            flags=["ro"]
        )
    ],
    extra_environment_variables={
        "MODEL_PATH": "models/llama-2-7b-chat.Q5_K_S.gguf",
        "HOST": "0.0.0.0",
        "PORT": "8001"
    },
    forwarded_ports=[
        pytest_container.PortForwarding(
            container_port=8001,
            host_port=8001
        )
    ],
    extra_launch_args=["--net=host"]
)

def pytest_generate_tests(metafunc):
    pytest_container.auto_container_parametrize(metafunc)

def pytest_addoption(parser):
    pytest_container.add_logging_level_options(parser)
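
Since conftest.py reads REGISTRY and IMAGE_NAME at import time, a local run needs them set first. A hedged sketch of a local entry point, assuming it is run from the repository root (the values mirror devcontainer.json; the script itself is not part of this commit):

# run_tests_locally.py -- illustrative only, not part of this commit.
import os
import sys

import pytest

# conftest.py fails at collection time if these are missing.
os.environ.setdefault("REGISTRY", "ghcr.io")
os.environ.setdefault("IMAGE_NAME", "ai-lab-recipes/playground")

sys.exit(pytest.main(["playground/tests", "--log-cli-level", "NOTSET"]))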
5 changes: 5 additions & 0 deletions playground/tests/requirements.txt
@@ -0,0 +1,5 @@
pip==24.0
git+https://github.com/dcermak/pytest_container
pytest-testinfra==10.1.0
pytest==8.1.1
requests==2.31.0
13 changes: 13 additions & 0 deletions playground/tests/test_alive.py
@@ -0,0 +1,13 @@
import pytest_container
from .conftest import TW
import time

CONTAINER_IMAGES = [TW]


def test_etc_os_release_present(auto_container: pytest_container.container.ContainerData):
    assert auto_container.connection.file("/etc/os-release").exists

def test_alive(auto_container: pytest_container.container.ContainerData, host):
    time.sleep(10)
    res = host.run_expect([0], f"curl http://localhost:{auto_container.forwarded_ports[0].host_port}").stdout.strip()
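
A possible follow-up check, sketched here rather than part of this commit: llama-cpp-python's server is OpenAI-compatible, so a test next to test_alive could query the /v1/models route through the forwarded port (requests is already pinned in playground/tests/requirements.txt). The endpoint and the sleep duration are assumptions.

# Illustrative sketch, not part of this commit; would live in the same module as test_alive.
import time
import requests
import pytest_container

def test_models_endpoint(auto_container: pytest_container.container.ContainerData):
    time.sleep(10)  # mirror test_alive: give the model time to load
    port = auto_container.forwarded_ports[0].host_port
    resp = requests.get(f"http://localhost:{port}/v1/models", timeout=30)
    assert resp.status_code == 200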
