From c014fd0303e984471db423d55d4d0a77c7cf9a85 Mon Sep 17 00:00:00 2001 From: Liora Milbaum Date: Sun, 31 Mar 2024 14:32:28 +0300 Subject: [PATCH] Spin Model Server in chatbot recipe Signed-off-by: Liora Milbaum --- .github/workflows/chatbot.yaml | 6 ----- .github/workflows/model_servers.yaml | 6 ----- .../chatbot/Makefile | 2 +- .../chatbot/tests/conftest.py | 22 +++++++++++++++++++ .../chatbot/tests/test_alive.py | 4 ++++ requirements-test.txt | 1 + 6 files changed, 28 insertions(+), 13 deletions(-) diff --git a/.github/workflows/chatbot.yaml b/.github/workflows/chatbot.yaml index 23ed2a473..8a2865b37 100644 --- a/.github/workflows/chatbot.yaml +++ b/.github/workflows/chatbot.yaml @@ -4,15 +4,9 @@ on: pull_request: branches: - main - paths: - - ./recipes/natural_language_processing/chatbot/** - - .github/workflows/chatbot.yaml push: branches: - main - paths: - - ./recipes/natural_language_processing/chatbot/** - - .github/workflows/chatbot.yaml env: REGISTRY: ghcr.io diff --git a/.github/workflows/model_servers.yaml b/.github/workflows/model_servers.yaml index 76381c16f..f27e7d26e 100644 --- a/.github/workflows/model_servers.yaml +++ b/.github/workflows/model_servers.yaml @@ -4,15 +4,9 @@ on: pull_request: branches: - main - paths: - - ./model_servers/llamacpp_python/** - - .github/workflows/model_servers.yaml push: branches: - main - paths: - - ./model_servers/llamacpp_python/** - - .github/workflows/model_servers.yaml env: REGISTRY: ghcr.io diff --git a/recipes/natural_language_processing/chatbot/Makefile b/recipes/natural_language_processing/chatbot/Makefile index 043c09ab6..4864ef51e 100644 --- a/recipes/natural_language_processing/chatbot/Makefile +++ b/recipes/natural_language_processing/chatbot/Makefile @@ -37,4 +37,4 @@ run: .PHONY: test test: - pytest --log-cli-level NOTSET + pytest --log-cli-level NOTSET --driver Firefox diff --git a/recipes/natural_language_processing/chatbot/tests/conftest.py 
b/recipes/natural_language_processing/chatbot/tests/conftest.py index 6242ebbe4..73cc9c99a 100644 --- a/recipes/natural_language_processing/chatbot/tests/conftest.py +++ b/recipes/natural_language_processing/chatbot/tests/conftest.py @@ -1,6 +1,28 @@ import pytest_container import os +MS = pytest_container.Container( + url=f"containers-storage:{os.environ['REGISTRY']}/model_servers", + volume_mounts=[ + pytest_container.container.BindMount( + container_path="/locallm/models", + host_path="./", + flags=["ro"] + ) + ], + extra_environment_variables={ + "MODEL_PATH": "models/mistral-7b-instruct-v0.1.Q4_K_M.gguf", + "HOST": "0.0.0.0", + "PORT": "8001" + }, + forwarded_ports=[ + pytest_container.PortForwarding( + container_port=8001, + host_port=8001 + ) + ], + extra_launch_args=["--net=host"] + ) CB = pytest_container.Container( url=f"containers-storage:{os.environ['REGISTRY']}/{os.environ['IMAGE_NAME']}", diff --git a/recipes/natural_language_processing/chatbot/tests/test_alive.py b/recipes/natural_language_processing/chatbot/tests/test_alive.py index a9a7e52bc..5a27408c0 100644 --- a/recipes/natural_language_processing/chatbot/tests/test_alive.py +++ b/recipes/natural_language_processing/chatbot/tests/test_alive.py @@ -10,3 +10,7 @@ def test_etc_os_release_present(auto_container: pytest_container.container.Conta @tenacity.retry(stop=tenacity.stop_after_attempt(5), wait=tenacity.wait_exponential()) def test_alive(auto_container: pytest_container.container.ContainerData, host): host.run_expect([0],f"curl http://localhost:{auto_container.forwarded_ports[0].host_port}",).stdout.strip() + +@tenacity.retry(stop=tenacity.stop_after_attempt(5), wait=tenacity.wait_exponential()) +def test_url(auto_container: pytest_container.container.ContainerData, selenium): + selenium.get(f"http://localhost:{auto_container.forwarded_ports[0].host_port}") diff --git a/requirements-test.txt b/requirements-test.txt index 751d336dc..bf7187738 100644 --- a/requirements-test.txt +++ 
b/requirements-test.txt @@ -1,5 +1,6 @@ pip==24.0 pytest-container==0.4.0 +pytest-selenium==4.1.0 pytest-testinfra==10.1.0 pytest==8.1.1 requests==2.31.0