forked from containers/podman-desktop-extension-ai-lab
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request containers#190 from Gregory-Pereira/abstract-model-downloads-to-model-dir

abstracting model downloads and file-normalization to models dir
Showing 16 changed files with 199 additions and 171 deletions.
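The change moves model downloads out of the individual model server Makefiles and into shared download-model-* targets that drop files into the top-level models directory, so tests and containers consume one normalized location. A rough sketch of the resulting local flow from a server directory such as model_servers/whispercpp, using only targets shown in the diffs below:

    make download-model-whisper-small   # fetch ggml-small.bin into ../../models
    make test                           # symlink the model from ../../models and run pytest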
Changed file: GitHub Actions workflow for the model server build-and-push-image job.
@@ -18,6 +18,7 @@ on:

env:
  REGISTRY: ghcr.io
  REGISTRY_ORG: containers

jobs:
  build-and-push-image:

@@ -82,7 +83,7 @@ jobs:

      - name: Download model
        working-directory: ./model_servers/${{ matrix.directory }}/
        run: make ${{ matrix.model }}
        run: make download-model-${{ matrix.model }}

      - name: Set up Python
        uses: actions/[email protected]

@@ -96,16 +97,14 @@ jobs:
      - name: Run non-gpu tests
        working-directory: ./model_servers/${{ matrix.directory }}/
        if: ${{ matrix.no_gpu }}
        run: make test
        env:
          IMAGE_NAME: ${{ matrix.image_name }}
        run: make test REGISTRY=${{ env.REGISTRY }} IMAGE_NAME=${{ env.REGISTRY_ORG }}/${{ matrix.image_name}}:latest

      - name: Run cuda test
        working-directory: ./model_servers/${{ matrix.directory }}/
        if: ${{ matrix.cuda }}
        run: make test-cuda
        env:
          IMAGE_NAME: ${{ matrix.image_name }}
      # - name: Run cuda test # we dont have cuda tests
      # working-directory: ./model_servers/${{ matrix.directory }}/
      # if: ${{ matrix.cuda }}
      # run: make test-cuda
      # env:
      # IMAGE_NAME: ${{ matrix.image_name }}

      - name: Login to Container Registry
        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
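With REGISTRY and REGISTRY_ORG taken from the workflow env, the non-gpu test step now collapses into a single make invocation. For a hypothetical matrix entry whose image_name is model_servers/whispercpp, the step would expand to roughly:

    make test REGISTRY=ghcr.io IMAGE_NAME=containers/model_servers/whispercpp:latest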
New file: shared model server Makefile (pulled in below by the whispercpp Makefile as ../common/Makefile.common).
@@ -0,0 +1,47 @@
REGISTRY ?= quay.io
REGISTRY_ORG ?= ai-lab
COMPONENT ?= model_servers

BIND_MOUNT_OPTIONS := ro
OS := $(shell uname -s)
ifeq ($(OS),Linux)
BIND_MOUNT_OPTIONS := Z,ro
endif

.PHONY: build
build:
	podman build --squash-all --build-arg $(PORT) -t $(IMAGE) . -f base/Containerfile

.PHONY: install
install:
	pip install -r tests/requirements.txt

.PHONY: test
test:
	@if [ ! -f "../../models/$(MODEL_NAME)" ]; then \
		echo "Model file -- $(MODEL_NAME) -- not present in the models directory."; \
		exit 1; \
	else \
		if [ ! -f "./$(MODEL_NAME)" ]; then \
			ln -s ../../models/$(MODEL_NAME) ./$(MODEL_NAME); \
		fi; \
		REGISTRY=$(REGISTRY) IMAGE_NAME=$(IMAGE_NAME) MODEL_NAME=$(MODEL_NAME) MODEL_PATH=$(MODEL_PATH) PORT=$(PORT) pytest -vvv -s ; \
	fi;

.PHONY: clean
clean:
	- rm ./$(MODEL_NAME) &> /dev/null

.PHONY: run
run:
	cd ../../models && \
	podman run -it -d -p $(PORT):$(PORT) -v ./$(MODEL_NAME):$(MODELS_PATH)/$(MODEL_NAME):$(BIND_MOUNT_OPTIONS) -e MODEL_PATH=$(MODELS_PATH)/$(MODEL_NAME) -e HOST=0.0.0.0 -e PORT=$(PORT) $(IMAGE)

.PHONY: podman-clean
podman-clean:
	@container_ids=$$(podman ps --format "{{.ID}} {{.Image}}" | awk '$$2 == "$(IMAGE)" {print $$1}'); \
	echo "removing all containers with IMAGE=$(IMAGE)"; \
	for id in $$container_ids; do \
		echo "Removing container: $$id,"; \
		podman rm -f $$id; \
	done
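A per-server Makefile is expected to set APP, PORT, and MODEL_NAME and then pull these shared targets in via include, as the whispercpp Makefile below does. A sketch of a local run from such a directory, assuming the model file is already present in ../../models and the default quay.io/ai-lab image names are in effect:

    make build          # build the server image from base/Containerfile
    make test           # symlink the model out of ../../models and run pytest
    make podman-clean   # remove any containers still running $(IMAGE)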
Changed file: model server pytest test module.
@@ -1,6 +1,7 @@
import pytest_container
from .conftest import MS
import tenacity
import os

CONTAINER_IMAGES = [MS]
Changed file: whispercpp model server Makefile.
@@ -1,57 +1,20 @@
PORT := 8001
APP := whispercpp
IMAGE := quay.io/ai-lab/model_servers/$(APP):latest
CUDA_IMAGE := quay.io/ai-lab/model_servers/$(APP)_cuda:latest
VULKAN_IMAGE :=quay.io/ai-lab/model_servers/$(APP)_vulkan:latest
PORT ?= 8001

# ----- MODEL OPTIONS -----
include ../common/Makefile.common

WHISPER_SMALL_MODEL_NAME := ggml-small.bin
WHISPER_SMALL_MODEL_URL := https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.bin

WHISPER_BASE_MODEL_NAME := ggml-base.en.bin
WHISPER_BASE_MODEL_URL := https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en.bin

SELECTED_MODEL_NAME := $(or $(SELECTED_MODEL),$(WHISPER_SMALL_MODEL_NAME))
SELECTED_MODEL_URL := $(or $(SELECTED_MODEL_LINK),$(WHISPER_SMALL_MODEL_URL))

# --- END MODEL OPTIONS ---
IMAGE_NAME ?= $(REGISTRY_ORG)/$(COMPONENT)/$(APP):latest
IMAGE ?= $(REGISTRY)/$(IMAGE_NAME)
# CUDA_IMAGE_NAME := $(REGISTRY)/$(BASE_IMAGE_NAME)/$(APP)_cuda:latest
# VULKAN_IMAGE := $(REGISTRY)/$(BASE_IMAGE_NAME)/$(APP)_vulkan:latest

MODELS_PATH := /app/models

BIND_MOUNT_OPTIONS := ro
OS := $(shell uname -s)
ifeq ($(OS),Linux)
BIND_MOUNT_OPTIONS := Z,ro
endif
MODEL_NAME ?= ggml-small.bin

.PHONY: all
all: build whisper-small run

.PHONY: build
build:
	podman build -t $(IMAGE) . -f Containerfile
all: build download-model-whisper-small run

.PHONY: whisper-small
whisper-small:
.PHONY: download-model-whisper-small # small .bin model type testing
download-model-whisper-small:
	cd ../../models && \
	curl -s -S -L -f $(WHISPER_SMALL_MODEL_URL) -z $(WHISPER_SMALL_MODEL_NAME) -o $(WHISPER_SMALL_MODEL_NAME).tmp && mv -f $(WHISPER_SMALL_MODEL_NAME).tmp $(WHISPER_SMALL_MODEL_NAME) 2>/dev/null || rm -f $(WHISPER_SMALL_MODEL_NAME).tmp $(WHISPER_SMALL_MODEL_NAME)

.PHONY: install
install:
	pip install -r tests/requirements.txt

.PHONY: download-model-whisper-base
download-model-whisper-base:
	cd ../../models && \
	curl -s -S -L -f $(WHISPER_BASE_MODEL_URL) -z $(WHISPER_BASE_MODEL_NAME) -o $(WHISPER_BASE_MODEL_NAME).tmp && mv -f $(WHISPER_BASE_MODEL_NAME).tmp $(WHISPER_BASE_MODEL_NAME) 2>/dev/null || rm -f $(WHISPER_BASE_MODEL_NAME).tmp $(WHISPER_BASE_MODEL_NAME)

.PHONY: run
run:
	cd ../../models && \
	podman run -d --rm -it -p $(PORT):$(PORT) -v ./$(SELECTED_MODEL_NAME):$(MODELS_PATH)/$(SELECTED_MODEL_NAME):$(BIND_MOUNT_OPTIONS) -e HOST=0.0.0.0 -e MODEL_PATH=$(MODELS_PATH)/$(SELECTED_MODEL_NAME) -e PORT=$(PORT) $(IMAGE)

.PHONY: test
test:
	curl -H "Cache-Control: no-cache" -s -S -L -f $(SELECTED_MODEL_URL) -z ./model.gguf -o ./model.gguf.tmp && mv -f ./model.gguf.tmp ./model.gguf 2>/dev/null || rm -f ./model.gguf.tmp ./model.gguf
	pytest --log-cli-level NOTSET
	make MODEL_NAME=ggml-small.bin MODEL_URL=https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.bin -f Makefile download-model
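The new download-model-whisper-small recipe delegates to a generic download-model target that is not shown in this view; the per-model curl recipes above are what it replaces. A hypothetical sketch of such a shared target, reusing the tmp-file-then-rename curl pattern from the old recipes and taking MODEL_NAME and MODEL_URL from the command line:

# Hypothetical shared target; the real definition is not part of this diff view.
.PHONY: download-model
download-model:
	cd ../../models && \
	curl -s -S -L -f $(MODEL_URL) -z $(MODEL_NAME) -o $(MODEL_NAME).tmp && mv -f $(MODEL_NAME).tmp $(MODEL_NAME) 2>/dev/null || rm -f $(MODEL_NAME).tmp $(MODEL_NAME)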