Skip to content

Commit

Permalink
Merge pull request #116 from lmilbaum/chatbot-tests
Browse files Browse the repository at this point in the history
chatbot tests
  • Loading branch information
sallyom authored Mar 28, 2024
2 parents 876425a + ff7cde0 commit c3a8aad
Show file tree
Hide file tree
Showing 8 changed files with 52 additions and 7 deletions.
6 changes: 6 additions & 0 deletions .github/workflows/chatbot.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,15 @@ on:
pull_request:
branches:
- main
paths:
- ./recipes/natural_language_processing/chatbot/**
- .github/workflows/chatbot.yaml
push:
branches:
- main
paths:
- ./recipes/natural_language_processing/chatbot/**
- .github/workflows/chatbot.yaml

env:
REGISTRY: ghcr.io
Expand Down
8 changes: 7 additions & 1 deletion .github/workflows/model_servers.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,15 @@ on:
pull_request:
branches:
- main
paths:
- ./model_servers/llamacpp_python/**
- .github/workflows/model_servers.yaml
push:
branches:
- main
paths:
- ./model_servers/llamacpp_python/**
- .github/workflows/model_servers.yaml

env:
REGISTRY: ghcr.io
Expand Down Expand Up @@ -43,7 +49,7 @@ jobs:

- name: Download model
working-directory: ./model_servers/llamacpp_python/
run: make models/llama-2-7b-chat.Q5_K_S.gguf
run: make llama-2-7b-chat.Q5_K_S.gguf

- name: Set up Python
uses: actions/[email protected]
Expand Down
4 changes: 2 additions & 2 deletions model_servers/llamacpp_python/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
build:
podman build -t ghcr.io/ai-lab-recipes/model_servers .

models/llama-2-7b-chat.Q5_K_S.gguf:
llama-2-7b-chat.Q5_K_S.gguf:
curl -s -S -L -f https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_S.gguf -z $@ -o $@.tmp && mv -f $@.tmp $@ 2>/dev/null || rm -f $@.tmp $@

.PHONY: install
Expand All @@ -15,4 +15,4 @@ run:

.PHONY: test
test:
pytest --collect-only tests --log-cli-level NOTSET
pytest --log-cli-level NOTSET
2 changes: 1 addition & 1 deletion model_servers/llamacpp_python/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
volume_mounts=[
pytest_container.container.BindMount(
container_path="/locallm/models",
host_path="./models",
host_path="./",
flags=["ro"]
)
],
Expand Down
2 changes: 1 addition & 1 deletion recipes/natural_language_processing/chatbot/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,4 @@ run:

.PHONY: test
test:
pytest --collect-only tests --log-cli-level NOTSET
pytest --log-cli-level NOTSET
Empty file.
23 changes: 23 additions & 0 deletions recipes/natural_language_processing/chatbot/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import pytest_container
import os


# Image coordinates come from the CI environment (set in the workflow).
_REGISTRY = os.environ["REGISTRY"]
_IMAGE_NAME = os.environ["IMAGE_NAME"]

# Chatbot container under test: pulled from local container storage,
# pointed at the model service on the podman gateway address, with the
# Streamlit UI port (8501) published on the host.
CB = pytest_container.Container(
    url=f"containers-storage:{_REGISTRY}/{_IMAGE_NAME}",
    extra_environment_variables={"MODEL_SERVICE_ENDPOINT": "http://10.88.0.1:8001/v1"},
    forwarded_ports=[pytest_container.PortForwarding(container_port=8501, host_port=8501)],
    # Host networking so the container can reach the model service endpoint.
    extra_launch_args=["--net=host"],
)

def pytest_generate_tests(metafunc):
    """Hook: parametrize tests over the module's CONTAINER_IMAGES via pytest_container."""
    pytest_container.auto_container_parametrize(metafunc)

def pytest_addoption(parser):
    """Hook: register pytest_container's log-level command-line options."""
    pytest_container.add_logging_level_options(parser)
14 changes: 12 additions & 2 deletions recipes/natural_language_processing/chatbot/tests/test_alive.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,12 @@
def test_placeholder():
assert 1 == 1
import pytest_container
from .conftest import CB
import tenacity

CONTAINER_IMAGES = [CB]

def test_etc_os_release_present(auto_container: pytest_container.container.ContainerData):
    """Sanity-check the image: /etc/os-release must exist inside the container."""
    os_release = auto_container.connection.file("/etc/os-release")
    assert os_release.exists

@tenacity.retry(stop=tenacity.stop_after_attempt(5), wait=tenacity.wait_exponential())
def test_alive(auto_container: pytest_container.container.ContainerData, host):
    """Poll the chatbot UI on its forwarded port until it responds.

    Retries up to 5 times with exponential backoff (tenacity re-raises the
    assertion/command failure, which triggers the next attempt).
    """
    port = auto_container.forwarded_ports[0].host_port
    # --fail makes curl exit non-zero on HTTP 4xx/5xx so run_expect([0], ...)
    # detects an unhealthy service; plain curl exits 0 even on a 500 page.
    # (Original also computed .stdout.strip() and discarded it — removed.)
    host.run_expect([0], f"curl --fail http://localhost:{port}")

0 comments on commit c3a8aad

Please sign in to comment.