From 025b577c3fc1ab05bf6b45f7a666d51b05b5ce25 Mon Sep 17 00:00:00 2001
From: Liora Milbaum
Date: Mon, 15 Apr 2024 12:20:32 +0300
Subject: [PATCH] build cuda image

Signed-off-by: Liora Milbaum
---
 .github/workflows/model_servers.yaml             | 14 --------------
 model_servers/llamacpp_python/cuda/Containerfile |  7 +++----
 model_servers/llamacpp_python/tests/conftest.py  |  2 +-
 3 files changed, 4 insertions(+), 19 deletions(-)

diff --git a/.github/workflows/model_servers.yaml b/.github/workflows/model_servers.yaml
index 96959ce6..eb08b0d5 100644
--- a/.github/workflows/model_servers.yaml
+++ b/.github/workflows/model_servers.yaml
@@ -32,12 +32,6 @@ jobs:
             directory: llamacpp_python
             platforms: linux/amd64,linux/arm64
             no_gpu: 1
-          #- image_name: llamacpp_python_vulkan
-          #  model: mistral
-          #  flavor: vulkan
-          #  directory: llamacpp_python
-          #  platforms: linux/arm64
-          #  vulkan: 1
           - image_name: llamacpp_python_cuda
             model: mistral
             flavor: cuda
@@ -100,13 +94,6 @@ jobs:
         if: ${{ matrix.no_gpu }}
         run: make test REGISTRY=${{ env.REGISTRY }} IMAGE_NAME=${{ env.REGISTRY_ORG }}/${{ matrix.image_name}}:latest
 
-      # - name: Run cuda test # we dont have cuda tests
-      #   working-directory: ./model_servers/${{ matrix.directory }}/
-      #   if: ${{ matrix.cuda }}
-      #   run: make test-cuda
-      #   env:
-      #     IMAGE_NAME: ${{ matrix.image_name }}
-
       - name: Login to Container Registry
         if: github.event_name == 'push' && github.ref == 'refs/heads/main'
         uses: redhat-actions/podman-login@v1.7
@@ -116,7 +103,6 @@
           password: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Push image
-        id: push_image
         if: github.event_name == 'push' && github.ref == 'refs/heads/main'
         uses: redhat-actions/push-to-registry@v2.8
         with:
diff --git a/model_servers/llamacpp_python/cuda/Containerfile b/model_servers/llamacpp_python/cuda/Containerfile
index ed8ff375..8c116dae 100644
--- a/model_servers/llamacpp_python/cuda/Containerfile
+++ b/model_servers/llamacpp_python/cuda/Containerfile
@@ -1,8 +1,7 @@
 FROM quay.io/opendatahub/workbench-images:cuda-ubi9-python-3.9-20231206
 WORKDIR /locallm
 COPY src .
-RUN pip install --upgrade pip
-ENV CMAKE_ARGS="-DLLAMA_CUBLAS=on"
+ENV CMAKE_ARGS="-DLLAMA_CUBLAS=on -DLLAMA_AVX2=OFF -DLLAMA_FMA=OFF -DLLAMA_F16C=OFF"
 ENV FORCE_CMAKE=1
-RUN pip install --no-cache-dir --upgrade -r /locallm/requirements.txt
-ENTRYPOINT [ "sh", "run.sh" ]
\ No newline at end of file
+RUN pip install --no-cache-dir -r ./requirements.txt
+ENTRYPOINT [ "sh", "run.sh" ]
diff --git a/model_servers/llamacpp_python/tests/conftest.py b/model_servers/llamacpp_python/tests/conftest.py
index 6cafe06f..24cb9440 100644
--- a/model_servers/llamacpp_python/tests/conftest.py
+++ b/model_servers/llamacpp_python/tests/conftest.py
@@ -32,7 +32,7 @@
 PORT = 8001
 
 MS = pytest_container.Container(
-    url=f"containers-storage:{REGISTRY}/{IMAGE_NAME}",
+    url=f"containers-storage:{os.environ['REGISTRY']}/{os.environ['IMAGE_NAME']}",
     volume_mounts=[
         pytest_container.container.BindMount(
             container_path="{MODEL_PATH}/{MODEL_NAME}".format(MODEL_PATH=MODEL_PATH, MODEL_NAME=MODEL_NAME),
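
Note on the cuda/Containerfile change: CMAKE_ARGS and FORCE_CMAKE are read while pip
compiles llama-cpp-python (presumably listed in requirements.txt), so -DLLAMA_CUBLAS=on
selects the cuBLAS (CUDA) backend and the AVX2/FMA/F16C switches disable those SIMD
code paths, presumably to keep the build portable to CPUs without those instructions.
Below is a rough, hypothetical smoke check (not part of the repository) that could be
run inside the built image to confirm a GPU backend was compiled in; the
llama_supports_gpu_offload helper only exists in reasonably recent llama-cpp-python
releases, hence the guard.

    # check_cuda_build.py - hypothetical smoke check, not part of the repository
    import llama_cpp

    print("llama-cpp-python", llama_cpp.__version__)
    if hasattr(llama_cpp, "llama_supports_gpu_offload"):
        # Returns True when the library was built with a GPU backend such as cuBLAS.
        print("GPU offload supported:", llama_cpp.llama_supports_gpu_offload())
    else:
        print("this llama-cpp-python build does not expose llama_supports_gpu_offload()")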
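
Note on the tests/conftest.py change: the container URL is now assembled from the
REGISTRY and IMAGE_NAME environment variables instead of module-level constants,
matching how the workflow calls make test with REGISTRY=... and IMAGE_NAME=...
(which the Makefile presumably exports to pytest); os.environ[...] fails fast with
a KeyError when either variable is missing. A minimal standalone sketch of the same
pattern follows; the fallback registry and image name are placeholders, not values
taken from the repository.

    # env_image_url.py - illustrative only; run with REGISTRY and IMAGE_NAME exported
    import os

    # Strict form, as in the patch: a missing variable raises KeyError immediately.
    url = f"containers-storage:{os.environ['REGISTRY']}/{os.environ['IMAGE_NAME']}"
    print(url)

    # More forgiving form for local runs, with hypothetical defaults.
    registry = os.getenv("REGISTRY", "ghcr.io")
    image_name = os.getenv("IMAGE_NAME", "containers/llamacpp_python")
    print(f"containers-storage:{registry}/{image_name}")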