Skip to content

Commit

Permalink
updating docs to work with new makefile changes
Browse files Browse the repository at this point in the history
Signed-off-by: greg pereira <[email protected]>
  • Loading branch information
Gregory-Pereira committed Apr 8, 2024
1 parent a1c4bc3 commit 604b47c
Show file tree
Hide file tree
Showing 6 changed files with 86 additions and 86 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/chatbot.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ jobs:

- name: Run Functional Tests
working-directory: ./recipes/natural_language_processing/chatbot
run: make functional_tests
run: make functional-tests

- name: Login to Registry
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
Expand Down
4 changes: 3 additions & 1 deletion .github/workflows/testing-framework.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,9 @@ jobs:

- name: Run Integration Tests
working-directory: ./main/recipes/natural_language_processing/chatbot
run: make URL=${{ steps.terraform-output.outputs.url }} integration_tests
run: make integration-tests
env:
URL: ${{ steps.terraform-output.outputs.url }}

- name: Destroy Test Environment
id: down
Expand Down
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,5 @@ models/*
model_servers/llamacpp_python/model.gguf
!models/convert_models/*
!models/Containerfile
!models/README.md
!models/README.md
recipes/chromedriver
38 changes: 32 additions & 6 deletions recipes/common/Makefile.common
Original file line number Diff line number Diff line change
@@ -1,14 +1,27 @@
REGISTRY ?= quay.io
IMAGE_NAME ?= ai-lab/${APP}:latest
CHROMADB_IMAGE ?= $(REGISTRY)/ai-lab/chromadb:latest
MODEL_IMAGE ?= $(REGISTRY)/ai-lab/mistral-7b-instruct:latest
REGISTRY_ORG ?= ai-lab

IMAGE_NAME ?= $(REGISTRY_ORG)/${APP}:latest
CHROMADB_IMAGE ?= $(REGISTRY)/$(REGISTRY_ORG)/chromadb:latest
MODEL_IMAGE ?= $(REGISTRY)/$(REGISTRY_ORG)/mistral-7b-instruct:latest
APP_IMAGE ?= $(REGISTRY)/$(IMAGE_NAME)
SERVER_IMAGE ?= $(REGISTRY)/ai-lab/llamacpp-python:latest
SERVER_IMAGE ?= $(REGISTRY)/$(REGISTRY_ORG)/llamacpp-python:latest

SSH_PUBKEY ?= $(shell cat ${HOME}/.ssh/id_rsa.pub;)
BOOTC_IMAGE ?= quay.io/ai-lab/${APP}-bootc:latest
BOOTC_IMAGE ?= quay.io/$(REGISTRY_ORG)/${APP}-bootc:latest

FROM ?=
ARCH ?=

# Chromedriver pinned for the Selenium-based functional/integration tests.
# See version index: https://chromedriver.storage.googleapis.com/index.html
CHROMEDRIVER_VERSION := 103.0.5060.53
CHROMEDRIVER_MIRROR := https://chromedriver.storage.googleapis.com
# Platform-specific zip name; set by the including Makefile (OS/ARCH dependent).
CHROMEDRIVER_DOWNLOAD_PATH :=
# Directory a repo-local chromedriver is installed into; set by the including Makefile.
CHROMEDRIVER_INSTALLATION_PATH ?=

# Bug fix: was `command -v chromedriver1`, which never matches anything, so the
# "system chromedriver already installed" branch of the test targets was dead code.
CHROMEDRIVER_EXISTS ?= $(shell command -v chromedriver)
# Repo-local install (takes precedence over the system binary in the test targets).
LOCAL_CHROMEDRIVER_EXISTS ?= $(shell command -v $(CHROMEDRIVER_INSTALLATION_PATH)/chromedriver)
# unzip is required to unpack the chromedriver archive.
UNZIP_EXISTS ?= $(shell command -v unzip)

.PHONY: build
build:
podman build $${ARCH:+--arch $${ARCH}} $${FROM:+--from $${FROM}} -f builds/Containerfile -t ${APP_IMAGE} .
Expand All @@ -17,6 +30,19 @@ build:
bootc: quadlet
podman build $${ARCH:+--arch $${ARCH}} $${FROM:+--from $${FROM}} --cap-add SYS_ADMIN --build-arg "SSH_PUBKEY=$(SSH_PUBKEY)" -f bootc/Containerfile -t ${BOOTC_IMAGE} .

.PHONY: install-chromedriver
# Download and unpack the pinned chromedriver into CHROMEDRIVER_INSTALLATION_PATH,
# unless a system-wide or repo-local chromedriver already exists.
# NOTE(review): the `[[ ]]` tests require a bash-compatible /bin/sh — confirm, or set SHELL := bash.
install-chromedriver:
	@if [[ -z "$(CHROMEDRIVER_EXISTS)" ]] && [[ -z "$(LOCAL_CHROMEDRIVER_EXISTS)" ]]; then \
		if [[ -n "$(UNZIP_EXISTS)" ]]; then \
			curl -sLO $(CHROMEDRIVER_MIRROR)/$(CHROMEDRIVER_VERSION)/$(CHROMEDRIVER_DOWNLOAD_PATH); \
			unzip $(CHROMEDRIVER_DOWNLOAD_PATH); \
			mv chromedriver $(CHROMEDRIVER_INSTALLATION_PATH); \
			rm ./$(CHROMEDRIVER_DOWNLOAD_PATH); \
		else \
			echo "Install make target requires unzip binary."; \
			exit 1; \
		fi; \
	fi;

.PHONY: quadlet
quadlet:
# Modify quadlet files to match the server, model and app image
Expand All @@ -37,7 +63,7 @@ quadlet:

.PHONY: run
run:
podman run -it -p 8501:8501 -e MODEL_SERVICE_ENDPOINT=http://10.88.0.1:8001/v1 ${APP_IMAGE}
podman run -it -p $(PORT):$(PORT) -e MODEL_SERVICE_ENDPOINT=http://10.88.0.1:8001/v1 ${APP_IMAGE}

.PHONY: clean
clean:
Expand Down
53 changes: 28 additions & 25 deletions recipes/natural_language_processing/chatbot/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,9 @@ PORT ?= 8501

include ../../common/Makefile.common

CHROMEDRIVER_EXISTS ?= $(shell command -v chromedriver)
UNZIP_EXISTS ?= $(shell command -v unzip)

CHROMEDRIVER_VERSION := 103.0.5060.53
CHROMEDRIVER_MIRROR := https://chromedriver.storage.googleapis.com
CHROMEDRIVER_DOWNLOAD_PATH :=

OS := $(shell uname -s)
ARCH := $(shell uname -m)
CHROMEDRIVER_INSTALLATION_PATH := $(shell realpath ../..)

ifeq ($(OS),Darwin) # This structure may vary if we upgrade chromedriver, see index: https://chromedriver.storage.googleapis.com/index.html
ifeq ($(ARCH),amd64)
Expand All @@ -25,21 +19,30 @@ endif

.PHONY: install
install:
@if [[ -z "$(CHROMEDRIVER_EXISTS)" ]]; then \
if [[ -n "$(UNZIP_EXISTS)" ]]; then \
curl -sLO $(CHROMEDRIVER_MIRROR)/$(CHROMEDRIVER_VERSION)/$(CHROMEDRIVER_DOWNLOAD_PATH); \
unzip $(CHROMEDRIVER_DOWNLOAD_PATH); \
mv chromedriver /usr/local/bin/chromedriver; \
elif [[ -z "$(UNZIP_EXISTS)" ]]; then \
echo "Install make target requires unizp binary."; \
fi; \
fi; \
pip install -r tests/requirements.txt

.PHONY: functional_tests
functional_tests:
IMAGE_NAME=${IMAGE_NAME} REGISTRY=${REGISTRY} pytest -vvv --driver=Chrome --driver-path=$(CHROMEDRIVER_EXISTS) tests/functional

.PHONY: integration_test
integration_tests:
URL=${URL} IMAGE_NAME=${IMAGE_NAME} REGISTRY=${REGISTRY} pytest -vvv --driver=Chrome --driver-path=$(CHROMEDRIVER_EXISTS) tests/integration
$(MAKE) install-chromedriver CHROMEDRIVER_INSTALLATION_PATH=${CHROMEDRIVER_INSTALLATION_PATH}
pip install -q -r tests/requirements.txt

.PHONY: functional-tests
# Run the Selenium functional tests, preferring a repo-local chromedriver,
# then a system-wide one, and fetching one (via `install`) as a last resort.
functional-tests:
	@if [[ -n "$(LOCAL_CHROMEDRIVER_EXISTS)" ]]; then \
		IMAGE_NAME=${IMAGE_NAME} REGISTRY=${REGISTRY} pytest -vvv --driver=Chrome --driver-path=$(CHROMEDRIVER_INSTALLATION_PATH)/chromedriver tests/functional; \
	elif [[ -n "$(CHROMEDRIVER_EXISTS)" ]] && [[ -z "$(LOCAL_CHROMEDRIVER_EXISTS)" ]]; then \
		IMAGE_NAME=${IMAGE_NAME} REGISTRY=${REGISTRY} pytest -vvv --driver=Chrome --driver-path=$(CHROMEDRIVER_EXISTS) tests/functional; \
	else \
		echo "fetching chromedriver"; \
		$(MAKE) install; \
		IMAGE_NAME=${IMAGE_NAME} REGISTRY=${REGISTRY} pytest -vvv --driver=Chrome --driver-path=$(CHROMEDRIVER_INSTALLATION_PATH)/chromedriver tests/functional; \
	fi;

.PHONY: integration-tests
# Run the Selenium integration tests against $(URL); chromedriver resolution
# mirrors functional-tests: repo-local, then system-wide, then fetch via `install`.
integration-tests:
	@if [[ -n "$(LOCAL_CHROMEDRIVER_EXISTS)" ]]; then \
		URL=${URL} IMAGE_NAME=${IMAGE_NAME} REGISTRY=${REGISTRY} pytest -vvv --driver=Chrome --driver-path=$(CHROMEDRIVER_INSTALLATION_PATH)/chromedriver tests/integration; \
	elif [[ -n "$(CHROMEDRIVER_EXISTS)" ]] && [[ -z "$(LOCAL_CHROMEDRIVER_EXISTS)" ]]; then \
		URL=${URL} IMAGE_NAME=${IMAGE_NAME} REGISTRY=${REGISTRY} pytest -vvv --driver=Chrome --driver-path=$(CHROMEDRIVER_EXISTS) tests/integration; \
	else \
		echo "fetching chromedriver"; \
		$(MAKE) install; \
		URL=${URL} IMAGE_NAME=${IMAGE_NAME} REGISTRY=${REGISTRY} pytest -vvv --driver=Chrome --driver-path=$(CHROMEDRIVER_INSTALLATION_PATH)/chromedriver tests/integration; \
	fi;

72 changes: 20 additions & 52 deletions recipes/natural_language_processing/chatbot/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -64,59 +64,35 @@ There are a number of options for quantization level, but we recommend `Q4_K_M`.
The recommended model can be downloaded using the code snippet below:

```bash
cd models
wget https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GGUF/resolve/main/mistral-7b-instruct-v0.1.Q4_K_M.gguf
cd ../
cd ../../../models
curl -sLO https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GGUF/resolve/main/mistral-7b-instruct-v0.1.Q4_K_M.gguf
cd ../recipes/natural_language_processing/chatbot
```

_A full list of supported open models is forthcoming._


### Build the Model Service
### Build and Deploy the Model Service

The complete instructions for building and deploying the Model Service can be found in the
[llamacpp_python model-service document](../model_servers/llamacpp_python/README.md).
[llamacpp_python model-service document](../../../model_servers/llamacpp_python/README.md).

The Model Service can be built from the root directory with the following code snippet:
The Model Service can be built and ran from make commands from the [llamacpp_python directory](../../../model_servers/llamacpp_python/).

```bash
cd model_servers/llamacpp_python
podman build -t llamacppserver -f base/Containerfile .
# from path model_servers/llamacpp_python from repo containers/ai-lab-recipes
make -f Makefile build && make -f Makefile run
```

### Deploy the Model Service
If you wish to run this as a codesnippet instead of a make command checkout the [Makefile](../../../model_servers/llamacpp_python/Makefile) to get a sense of what the code for that would look like.

The complete instructions for building and deploying the Model Service can be found in the
[llamacpp_python model-service document](../model_servers/llamacpp_python/README.md).

The local Model Service relies on a volume mount to the localhost to access the model files. You can start your local
Model Service using the following Podman command:

```
podman run --rm -it \
-p 8001:8001 \
-v Local/path/to/locallm/models:/locallm/models \
-e MODEL_PATH=models/<model-filename> \
-e HOST=0.0.0.0 \
-e PORT=8001 \
llamacppserver
```

### Build the AI Application

Now that the Model Service is running we want to build and deploy our AI Application. Use the provided Containerfile to build the AI Application image from the `chatbot-langchain/` directory.

```bash
cd chatbot
make APPIMAGE=chatbot build
```

### Deploy the AI Application
### Build and Deploy the AI Application

Make sure the Model Service is up and running before starting this container image. When starting the AI Application container image we need to direct it to the correct `MODEL_SERVICE_ENDPOINT`. This could be any appropriately hosted Model Service (running locally or in the cloud) using an OpenAI compatible API. In our case the Model Service is running inside the Podman machine so we need to provide it with the appropriate address `10.88.0.1`. The following Podman command can be used to run your AI Application:
Make sure the Model Service is up and running before starting this container image. When starting the AI Application container image we need to direct it to the correct `MODEL_SERVICE_ENDPOINT`. This could be any appropriately hosted Model Service (running locally or in the cloud) using an OpenAI compatible API. In our case the Model Service is running inside the Podman machine so we need to provide it with the appropriate address `10.88.0.1`. To build and deploy the AI application use the following:

```bash
podman run --rm -it -p 8501:8501 -e MODEL_SERVICE_ENDPOINT=http://10.88.0.1:8001/v1 chatbot
# Run this from the current directory (path recipes/natural_language_processing/chatbot from repo containers/ai-lab-recipes)
make -f Makefile build && make -f Makefile run
```

### Interact with the AI Application
Expand All @@ -125,18 +101,12 @@ Everything should now be up and running with the chat application available at [`

### Embed the AI Application in a Bootable Container Image

To build a bootable container image that includes this sample chatbot workload as a service that starts when a system is booted, cd into this folder
and run:


```
make BOOTCIMAGE=quay.io/your/chatbot-bootc:latest bootc
```
To build a bootable container image that includes this sample chatbot workload as a service that starts when a system is booted, run: `make -f Makefile bootc`. You can optionally override the default image / tag you want to give the make command by specifying it as follows: `make -f Makefile BOOTC_IMAGE=<your_bootc_image> bootc`.

Substituting the bootc/Containerfile FROM command is simple using the Makefile FROM option.

```
make FROM=registry.redhat.io/rhel9-beta/rhel-bootc:9.4 BOOTCIMAGE=quay.io/your/chatbot-bootc:latest bootc
```bash
make FROM=registry.redhat.io/rhel9-beta/rhel-bootc:9.4 bootc
```

Selecting the ARCH for the bootc/Containerfile is simple using the Makefile ARCH= option.
Expand All @@ -148,15 +118,13 @@ make ARCH=x86_64 bootc
The magic happens when you have a bootc enabled system running. If you do, and you'd like to update the operating system to the OS you just built
with the chatbot application, it's as simple as ssh-ing into the bootc system and running:

```bash
bootc switch quay.io/ai-lab/chatbot-bootc:latest
```
bootc switch quay.io/your/chatbot-bootc:latest
```

Upon a reboot, you'll see that the chatbot service is running on the system.

Check on the service with
Upon a reboot, you'll see that the chatbot service is running on the system. Check on the service with:

```
```bash
ssh user@bootc-system-ip
sudo systemctl status chatbot
```
Expand Down

0 comments on commit 604b47c

Please sign in to comment.