From 74bf16f513f5a7982c566df67a3843aefc406954 Mon Sep 17 00:00:00 2001 From: Jeff MAURY Date: Fri, 26 Jan 2024 12:01:51 +0100 Subject: [PATCH 1/8] fix: handle JetBrains IDEs in .gitignore (#155) Fixes #154 Signed-off-by: Jeff MAURY --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 831018a8d..8cea9f0ac 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ node_modules dist .eslintcache **/coverage +.idea From 6c2133e76a558dcfe59960c72f94b05de853db0e Mon Sep 17 00:00:00 2001 From: Luca Stocchi <49404737+lstocchi@users.noreply.github.com> Date: Fri, 26 Jan 2024 12:21:48 +0100 Subject: [PATCH 2/8] feat: start pod with sample and model service (#144) Signed-off-by: lstocchi --- .../src/managers/applicationManager.spec.ts | 67 +++++++++++- .../src/managers/applicationManager.ts | 102 +++++++++++++++--- packages/backend/src/utils/utils.ts | 47 ++++++++ 3 files changed, 198 insertions(+), 18 deletions(-) create mode 100644 packages/backend/src/utils/utils.ts diff --git a/packages/backend/src/managers/applicationManager.spec.ts b/packages/backend/src/managers/applicationManager.spec.ts index 3ba39257a..82af4538a 100644 --- a/packages/backend/src/managers/applicationManager.spec.ts +++ b/packages/backend/src/managers/applicationManager.spec.ts @@ -1,5 +1,5 @@ import { type MockInstance, describe, expect, test, vi, beforeEach } from 'vitest'; -import type { ImageInfo, PodInfo } from './applicationManager'; +import type { ContainerAttachedInfo, ImageInfo, PodInfo } from './applicationManager'; import { ApplicationManager } from './applicationManager'; import type { RecipeStatusRegistry } from '../registries/RecipeStatusRegistry'; import type { GitManager } from './gitManager'; @@ -13,6 +13,7 @@ import path from 'node:path'; import type { AIConfig, ContainerConfig } from '../models/AIConfig'; import * as portsUtils from '../utils/ports'; import { goarch } from '../utils/arch'; +import * as utils from '../utils/utils'; const mocks = vi.hoisted(() => { return { @@ -23,6 +24,8 @@ const mocks = vi.hoisted(() => { createPodMock: vi.fn(), createContainerMock: vi.fn(), replicatePodmanContainerMock: vi.fn(), + startContainerMock: vi.fn(), + startPod: vi.fn(), }; }); vi.mock('../models/AIConfig', () => ({ @@ -36,6 +39,8 @@ vi.mock('@podman-desktop/api', () => ({ createPod: mocks.createPodMock, createContainer: mocks.createContainerMock, replicatePodmanContainer: mocks.replicatePodmanContainerMock, + startContainer: mocks.startContainerMock, + startPod: mocks.startPod, }, })); let setTaskMock: MockInstance; @@ -668,6 +673,13 @@ describe('createPod', async () => { protocol: '', range: 1, }, + { + container_port: 8082, + host_port: 9000, + host_ip: '', + protocol: '', + range: 1, + }, ], }); }); @@ -710,7 +722,7 @@ describe('createApplicationPod', () => { vi.spyOn(manager, 'createPod').mockResolvedValue(pod); const createAndAddContainersToPodMock = vi .spyOn(manager, 'createAndAddContainersToPod') - .mockImplementation((_pod: PodInfo, _images: ImageInfo[], _modelPath: string) => Promise.resolve()); + .mockImplementation((_pod: PodInfo, _images: ImageInfo[], _modelPath: string) => Promise.resolve([])); await manager.createApplicationPod(images, 'path', taskUtils); expect(createAndAddContainersToPodMock).toBeCalledWith(pod, images, 'path'); expect(setTaskMock).toBeCalledWith({ @@ -720,3 +732,54 @@ describe('createApplicationPod', () => { }); }); }); + +describe('restartContainerWhenEndpointIsUp', () => { + const manager = new ApplicationManager( + 
'/home/user/aistudio', + {} as unknown as GitManager, + {} as unknown as RecipeStatusRegistry, + {} as unknown as ModelsManager, + ); + const containerAttachedInfo: ContainerAttachedInfo = { + name: 'name', + endPoint: 'endpoint', + }; + test('restart container if endpoint is alive', async () => { + vi.spyOn(utils, 'isEndpointAlive').mockResolvedValue(true); + await manager.restartContainerWhenEndpointIsUp('engine', containerAttachedInfo); + expect(mocks.startContainerMock).toBeCalledWith('engine', 'name'); + }); +}); + +describe('runApplication', () => { + const manager = new ApplicationManager( + '/home/user/aistudio', + {} as unknown as GitManager, + {} as unknown as RecipeStatusRegistry, + {} as unknown as ModelsManager, + ); + const pod: PodInfo = { + engineId: 'engine', + Id: 'id', + containers: [ + { + name: 'first', + endPoint: 'url', + }, + { + name: 'second', + }, + ], + }; + test('check startPod is called and also restartContainerWhenEndpointIsUp for sample app', async () => { + const restartContainerWhenEndpointIsUpMock = vi + .spyOn(manager, 'restartContainerWhenEndpointIsUp') + .mockImplementation((_engineId: string, _container: ContainerAttachedInfo) => Promise.resolve()); + await manager.runApplication(pod, taskUtils); + expect(mocks.startPod).toBeCalledWith(pod.engineId, pod.Id); + expect(restartContainerWhenEndpointIsUpMock).toBeCalledWith(pod.engineId, { + name: 'first', + endPoint: 'url', + }); + }); +}); diff --git a/packages/backend/src/managers/applicationManager.ts b/packages/backend/src/managers/applicationManager.ts index a5afb60b4..df5a252a9 100644 --- a/packages/backend/src/managers/applicationManager.ts +++ b/packages/backend/src/managers/applicationManager.ts @@ -32,6 +32,7 @@ import type { ModelInfo } from '@shared/src/models/IModelInfo'; import type { ModelsManager } from './modelsManager'; import { getPortsInfo } from '../utils/ports'; import { goarch } from '../utils/arch'; +import { isEndpointAlive, timeout } from '../utils/utils'; export const CONFIG_FILENAME = 'ai-studio.yaml'; @@ -45,9 +46,15 @@ interface AIContainers { containers: ContainerConfig[]; } +export interface ContainerAttachedInfo { + name: string; + endPoint?: string; +} + export interface PodInfo { engineId: string; Id: string; + containers?: ContainerAttachedInfo[]; } export interface ImageInfo { @@ -89,10 +96,54 @@ export class ApplicationManager { ); // create a pod containing all the containers to run the application - await this.createApplicationPod(images, modelPath, taskUtil); + const podInfo = await this.createApplicationPod(images, modelPath, taskUtil); + + await this.runApplication(podInfo, taskUtil); } - async createApplicationPod(images: ImageInfo[], modelPath: string, taskUtil: RecipeStatusUtils) { + async runApplication(podInfo: PodInfo, taskUtil: RecipeStatusUtils) { + taskUtil.setTask({ + id: `running-${podInfo.Id}`, + state: 'loading', + name: `Starting application`, + }); + + // it starts the pod + await containerEngine.startPod(podInfo.engineId, podInfo.Id); + + // most probably the sample app will fail at starting as it tries to connect to the model_service which is still loading the model + // so we check if the endpoint is ready before to restart the sample app + await Promise.all( + podInfo.containers?.map(async container => { + if (!container.endPoint) { + return; + } + return this.restartContainerWhenEndpointIsUp(podInfo.engineId, container).catch((e: unknown) => { + console.error(String(e)); + }); + }), + ); + + taskUtil.setTask({ + id: `running-${podInfo.Id}`, + 
state: 'success', + name: `Application is running`, + }); + } + + async restartContainerWhenEndpointIsUp(engineId: string, container: ContainerAttachedInfo): Promise { + const alive = await isEndpointAlive(container.endPoint); + if (alive) { + await containerEngine.startContainer(engineId, container.name); + return; + } + await timeout(5000); + await this.restartContainerWhenEndpointIsUp(engineId, container).catch((error: unknown) => { + console.error('Error monitoring endpoint', error); + }); + } + + async createApplicationPod(images: ImageInfo[], modelPath: string, taskUtil: RecipeStatusUtils): Promise { // create empty pod let pod: PodInfo; try { @@ -113,20 +164,29 @@ export class ApplicationManager { name: `Creating application`, }); - await this.createAndAddContainersToPod(pod, images, modelPath); + const attachedContainers = await this.createAndAddContainersToPod(pod, images, modelPath); taskUtil.setTask({ id: pod.Id, state: 'success', name: `Creating application`, }); + + pod.containers = attachedContainers; + return pod; } - async createAndAddContainersToPod(pod: PodInfo, images: ImageInfo[], modelPath: string) { + async createAndAddContainersToPod( + pod: PodInfo, + images: ImageInfo[], + modelPath: string, + ): Promise { + const containers: ContainerAttachedInfo[] = []; await Promise.all( images.map(async image => { let hostConfig: unknown; let envs: string[] = []; + let endPoint: string; // if it's a model service we mount the model as a volume if (image.modelService) { const modelName = path.basename(modelPath); @@ -148,7 +208,8 @@ export class ApplicationManager { // TODO: remove static port const modelService = images.find(image => image.modelService); if (modelService && modelService.ports.length > 0) { - envs = [`MODEL_ENDPOINT=http://localhost:${modelService.ports[0]}`]; + endPoint = `http://localhost:${modelService.ports[0]}`; + envs = [`MODEL_ENDPOINT=${endPoint}`]; } } const createdContainer = await containerEngine @@ -164,41 +225,50 @@ export class ApplicationManager { // now, for each container, put it in the pod if (createdContainer) { try { + const podifiedName = this.getRandomName(`${image.appName}-podified`); await containerEngine.replicatePodmanContainer( { id: createdContainer.id, engineId: pod.engineId, }, { engineId: pod.engineId }, - { pod: pod.Id, name: this.getRandomName(`${image.appName}-podified`) }, + { pod: pod.Id, name: podifiedName }, ); + containers.push({ + name: podifiedName, + endPoint, + }); } catch (error) { console.error(error); } } }), ); + return containers; } async createPod(images: ImageInfo[]): Promise { // find the exposed port of the sample app so we can open its ports on the new pod const sampleAppImageInfo = images.find(image => !image.modelService); if (!sampleAppImageInfo) { - console.error('no image found'); + console.error('no sample app image found'); throw new Error('no sample app found'); } const portmappings: PodCreatePortOptions[] = []; // N.B: it may not work with ranges - for (const exposed of sampleAppImageInfo.ports) { - const localPorts = await getPortsInfo(exposed); - portmappings.push({ - container_port: parseInt(exposed), - host_port: parseInt(localPorts), - host_ip: '', - protocol: '', - range: 1, - }); + // we expose all ports so we can check the model service if it is actually running + for (const image of images) { + for (const exposed of image.ports) { + const localPorts = await getPortsInfo(exposed); + portmappings.push({ + container_port: parseInt(exposed), + host_port: parseInt(localPorts), + host_ip: '', + 
protocol: '', + range: 1, + }); + } } // create new pod diff --git a/packages/backend/src/utils/utils.ts b/packages/backend/src/utils/utils.ts new file mode 100644 index 000000000..33ffa0fe9 --- /dev/null +++ b/packages/backend/src/utils/utils.ts @@ -0,0 +1,47 @@ +/********************************************************************** + * Copyright (C) 2024 Red Hat, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + ***********************************************************************/ +import * as http from 'node:http'; + +export async function timeout(time: number): Promise { + return new Promise(resolve => { + setTimeout(resolve, time); + }); +} + +export async function isEndpointAlive(endPoint: string): Promise { + return new Promise(resolve => { + const req = http.get(endPoint, res => { + res.on('data', () => { + // do nothing + }); + + res.on('end', () => { + console.log(res); + if (res.statusCode === 200) { + resolve(true); + } else { + resolve(false); + } + }); + }); + req.once('error', err => { + console.log('Error while pinging endpoint', err); + resolve(false); + }); + }); +} From 21584c4cff5912c9ab09e24cd7dfd3b9edef923d Mon Sep 17 00:00:00 2001 From: Jeff MAURY Date: Fri, 26 Jan 2024 11:01:43 +0100 Subject: [PATCH 3/8] fix: rename from studio-extension to ai-studio OCI image is also renamed and main branch is published as nightly Fixes #124 Signed-off-by: Jeff MAURY --- .github/workflows/build-next.yaml | 10 +++++----- README.md | 6 +++--- package.json | 6 +++--- packages/backend/package.json | 6 +++--- packages/frontend/package.json | 2 +- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/build-next.yaml b/.github/workflows/build-next.yaml index 822b63287..1e646cec7 100644 --- a/.github/workflows/build-next.yaml +++ b/.github/workflows/build-next.yaml @@ -57,10 +57,10 @@ jobs: - name: Publish Image id: publish-image run: | - IMAGE_NAME=ghcr.io/${{ github.repository_owner }}/studio-extension - IMAGE_LATEST=${IMAGE_NAME}:latest + IMAGE_NAME=ghcr.io/${{ github.repository_owner }}/ai-studio + IMAGE_NIGHTLY=${IMAGE_NAME}:nightly IMAGE_SHA=${IMAGE_NAME}:${GITHUB_SHA} - podman build -t $IMAGE_LATEST . - podman push $IMAGE_LATEST - podman tag $IMAGE_LATEST $IMAGE_SHA + podman build -t $IMAGE_NIGHTLY . + podman push $IMAGE_NIGHTLY + podman tag $IMAGE_NIGHTLY $IMAGE_SHA podman push $IMAGE_SHA diff --git a/README.md b/README.md index a2e48e30c..ae50624ee 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,12 @@ -# studio extension +# AI studio ## Installing a development version You can install this extension from Podman Desktop UI > ⚙ Settings > Extensions > Install a new extension from OCI Image. -The name of the image to use is `ghcr.io/projectatomic/studio-extension:latest`. +The name of the image to use is `ghcr.io/projectatomic/ai-studio:latest`. -You can get earlier tags for the image at https://github.com/projectatomic/studio-extension/pkgs/container/studio-extension. 
+You can get earlier tags for the image at https://github.com/projectatomic/studio-extension/pkgs/container/ai-studio. These images contain development versions of the extension. There is no stable release yet. diff --git a/package.json b/package.json index e648818e6..8a5933263 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { - "name": "studio-extension-monorepo", - "displayName": "studio-extension-monorepo", - "description": "studio-extension-monorepo", + "name": "ai-studio-monorepo", + "displayName": "ai-studio-monorepo", + "description": "ai-studio-monorepo", "publisher": "redhat", "version": "0.0.0", "private": true, diff --git a/packages/backend/package.json b/packages/backend/package.json index 50a3fe239..1642a89f3 100644 --- a/packages/backend/package.json +++ b/packages/backend/package.json @@ -1,7 +1,7 @@ { - "name": "studio-extension", - "displayName": "studio extension", - "description": "Podman Desktop Studio Extension", + "name": "ai-studio", + "displayName": "AI Studio", + "description": "Podman Desktop AI Studio", "version": "0.1.0-next", "icon": "icon.png", "publisher": "redhat", diff --git a/packages/frontend/package.json b/packages/frontend/package.json index 2ca9ed9bc..812f2d249 100644 --- a/packages/frontend/package.json +++ b/packages/frontend/package.json @@ -1,6 +1,6 @@ { "name": "frontend-app", - "displayName": "UI for studio extension", + "displayName": "UI for AI Studio", "version": "0.1.0-next", "type": "module", "scripts": { From da29b7e425961d9556d71aee07616a7031187392 Mon Sep 17 00:00:00 2001 From: Luca Stocchi <49404737+lstocchi@users.noreply.github.com> Date: Fri, 26 Jan 2024 14:44:19 +0100 Subject: [PATCH 4/8] fix: return correct path when downloading model (#158) Signed-off-by: lstocchi --- .../src/managers/applicationManager.spec.ts | 50 ++++++++++++++++++- .../src/managers/applicationManager.ts | 47 +++++++++++------ 2 files changed, 79 insertions(+), 18 deletions(-) diff --git a/packages/backend/src/managers/applicationManager.spec.ts b/packages/backend/src/managers/applicationManager.spec.ts index 82af4538a..0491ffd41 100644 --- a/packages/backend/src/managers/applicationManager.spec.ts +++ b/packages/backend/src/managers/applicationManager.spec.ts @@ -1,5 +1,5 @@ import { type MockInstance, describe, expect, test, vi, beforeEach } from 'vitest'; -import type { ContainerAttachedInfo, ImageInfo, PodInfo } from './applicationManager'; +import type { ContainerAttachedInfo, DownloadModelResult, ImageInfo, PodInfo } from './applicationManager'; import { ApplicationManager } from './applicationManager'; import type { RecipeStatusRegistry } from '../registries/RecipeStatusRegistry'; import type { GitManager } from './gitManager'; @@ -733,17 +733,63 @@ describe('createApplicationPod', () => { }); }); -describe('restartContainerWhenEndpointIsUp', () => { +describe('doDownloadModelWrapper', () => { const manager = new ApplicationManager( '/home/user/aistudio', {} as unknown as GitManager, {} as unknown as RecipeStatusRegistry, {} as unknown as ModelsManager, ); + test('returning model path if model has been downloaded', async () => { + vi.spyOn(manager, 'doDownloadModel').mockImplementation( + ( + _modelId: string, + _url: string, + _taskUtil: RecipeStatusUtils, + callback: (message: DownloadModelResult) => void, + _destFileName?: string, + ) => { + callback({ + successful: true, + path: 'path', + }); + }, + ); + setTaskStateMock.mockReturnThis(); + const result = await manager.doDownloadModelWrapper('id', 'url', taskUtils); + 
expect(result).toBe('path'); + }); + test('rejecting with error message if model has NOT been downloaded', async () => { + vi.spyOn(manager, 'doDownloadModel').mockImplementation( + ( + _modelId: string, + _url: string, + _taskUtil: RecipeStatusUtils, + callback: (message: DownloadModelResult) => void, + _destFileName?: string, + ) => { + callback({ + successful: false, + error: 'error', + }); + }, + ); + setTaskStateMock.mockReturnThis(); + await expect(manager.doDownloadModelWrapper('id', 'url', taskUtils)).rejects.toThrowError('error'); + }); +}); + +describe('restartContainerWhenEndpointIsUp', () => { const containerAttachedInfo: ContainerAttachedInfo = { name: 'name', endPoint: 'endpoint', }; + const manager = new ApplicationManager( + '/home/user/aistudio', + {} as unknown as GitManager, + {} as unknown as RecipeStatusRegistry, + {} as unknown as ModelsManager, + ); test('restart container if endpoint is alive', async () => { vi.spyOn(utils, 'isEndpointAlive').mockResolvedValue(true); await manager.restartContainerWhenEndpointIsUp('engine', containerAttachedInfo); diff --git a/packages/backend/src/managers/applicationManager.ts b/packages/backend/src/managers/applicationManager.ts index df5a252a9..d21ff2d79 100644 --- a/packages/backend/src/managers/applicationManager.ts +++ b/packages/backend/src/managers/applicationManager.ts @@ -36,9 +36,16 @@ import { isEndpointAlive, timeout } from '../utils/utils'; export const CONFIG_FILENAME = 'ai-studio.yaml'; -interface DownloadModelResult { - result: 'ok' | 'failed'; - error?: string; +export type DownloadModelResult = DownloadModelSuccessfulResult | DownloadModelFailureResult; + +interface DownloadModelSuccessfulResult { + successful: true; + path: string; +} + +interface DownloadModelFailureResult { + successful: false; + error: string; } interface AIContainers { @@ -428,7 +435,20 @@ export class ApplicationManager { }, }); - return await this.doDownloadModelWrapper(model.id, model.url, taskUtil); + try { + return await this.doDownloadModelWrapper(model.id, model.url, taskUtil); + } catch (e) { + console.error(e); + taskUtil.setTask({ + id: model.id, + state: 'error', + name: `Downloading model ${model.name}`, + labels: { + 'model-pulling': model.id, + }, + }); + throw e; + } } else { taskUtil.setTask({ id: model.id, @@ -520,26 +540,20 @@ export class ApplicationManager { ): Promise { return new Promise((resolve, reject) => { const downloadCallback = (result: DownloadModelResult) => { - if (result.result) { + if (result.successful === true) { taskUtil.setTaskState(modelId, 'success'); - resolve(destFileName); - } else { + resolve(result.path); + } else if (result.successful === false) { taskUtil.setTaskState(modelId, 'error'); reject(result.error); } }; - if (fs.existsSync(destFileName)) { - taskUtil.setTaskState(modelId, 'success'); - taskUtil.setTaskProgress(modelId, 100); - return; - } - this.doDownloadModel(modelId, url, taskUtil, downloadCallback, destFileName); }); } - private doDownloadModel( + doDownloadModel( modelId: string, url: string, taskUtil: RecipeStatusUtils, @@ -581,7 +595,8 @@ export class ApplicationManager { //this.sendProgress(progressValue); if (progressValue === 100) { callback({ - result: 'ok', + successful: true, + path: destFile, }); } }); @@ -590,7 +605,7 @@ export class ApplicationManager { }); file.on('error', e => { callback({ - result: 'failed', + successful: false, error: e.message, }); }); From 0a85255d75277f9dd450c619101e71dbe11a09e3 Mon Sep 17 00:00:00 2001 From: Jeff MAURY Date: Fri, 26 Jan 2024 
16:09:05 +0100 Subject: [PATCH 5/8] fix: add release workflow (#157) Fixes #156 Signed-off-by: Jeff MAURY --- .github/workflows/release.yaml | 136 +++++++++++++++++++++++++++++++++ 1 file changed, 136 insertions(+) create mode 100644 .github/workflows/release.yaml diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 000000000..50bb153a4 --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,136 @@ +# +# Copyright (C) 2024 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 + +name: release + +on: + workflow_dispatch: + inputs: + version: + description: 'Version to release' + required: true + branch: + description: 'Branch to use for the release' + required: true + default: main +env: + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} + +jobs: + + tag: + name: Tagging + runs-on: ubuntu-20.04 + outputs: + githubTag: ${{ steps.TAG_UTIL.outputs.githubTag}} + extVersion: ${{ steps.TAG_UTIL.outputs.extVersion}} + releaseId: ${{ steps.create_release.outputs.id}} + + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.inputs.branch }} + - name: Generate tag utilities + id: TAG_UTIL + run: | + TAG_PATTERN=${{ github.event.inputs.version }} + echo "githubTag=v$TAG_PATTERN" >> ${GITHUB_OUTPUT} + echo "extVersion=$TAG_PATTERN" >> ${GITHUB_OUTPUT} + + - name: tag + run: | + git config --local user.name ${{ github.actor }} + + # Add the new version in package.json file + sed -i "s#version\":\ \"\(.*\)\",#version\":\ \"${{ steps.TAG_UTIL.outputs.extVersion }}\",#g" package.json + sed -i "s#version\":\ \"\(.*\)\",#version\":\ \"${{ steps.TAG_UTIL.outputs.extVersion }}\",#g" packages/backend/package.json + sed -i "s#version\":\ \"\(.*\)\",#version\":\ \"${{ steps.TAG_UTIL.outputs.extVersion }}\",#g" packages/frontend/package.json + git add package.json + git add packages/backend/package.json + git add packages/frontend/package.json + + # commit the changes + git commit -m "chore: 🥁 tagging ${{ steps.TAG_UTIL.outputs.githubTag }} 🥳" + echo "Tagging with ${{ steps.TAG_UTIL.outputs.githubTag }}" + git tag ${{ steps.TAG_UTIL.outputs.githubTag }} + git push origin ${{ steps.TAG_UTIL.outputs.githubTag }} + - name: Create Release + id: create_release + uses: ncipollo/release-action@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag: ${{ steps.TAG_UTIL.outputs.githubTag }} + name: ${{ steps.TAG_UTIL.outputs.githubTag }} + draft: true + prerelease: false + + build: + needs: [tag] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-node@v3 + with: + node-version: 16 + + - name: Get yarn cache directory path + id: yarn-cache-dir-path + run: echo "dir=$(npx yarn cache dir)" >> ${GITHUB_OUTPUT} + + - uses: actions/cache@v3 + id: yarn-cache + with: + path: ${{ steps.yarn-cache-dir-path.outputs.dir }} + key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} + restore-keys: | + ${{ runner.os }}-yarn- + + - name: Execute yarn + if: ${{ 
steps.cacheNodeModules.outputs.cache-hit != 'true' }} + run: npx yarn --frozen-lockfile --network-timeout 180000 + + - name: Run Build + run: npx yarn build + + - name: Login to ghcr.io + run: podman login --username ${{ github.repository_owner }} --password ${{ secrets.GITHUB_TOKEN }} ghcr.io + + - name: Build Image + id: build-image + run: | + podman build -t ghcr.io/${{ github.repository_owner }}/ai-studio:${{ needs.tag.outputs.extVersion }} . + podman push ghcr.io/${{ github.repository_owner }}/ai-studio:${{ needs.tag.outputs.extVersion }} + podman tag ghcr.io/${{ github.repository_owner }}/ai-studio:${{ needs.tag.outputs.extVersion }} ghcr.io/${{ github.repository_owner }}/ai-studio:latest + podman push ghcr.io/${{ github.repository_owner }}/ai-studio:latest + + release: + needs: [tag, build] + name: Release + runs-on: ubuntu-20.04 + steps: + - name: id + run: echo the release id is ${{ needs.tag.outputs.releaseId}} + + - name: Publish release + uses: StuYarrow/publish-release@v1.1.2 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + id: ${{ needs.tag.outputs.releaseId}} + From 2c9fa6757f1219aebaf677b4a1593c8af346f084 Mon Sep 17 00:00:00 2001 From: Philippe Martin Date: Fri, 26 Jan 2024 17:03:56 +0100 Subject: [PATCH 6/8] feat: delete model (#134) * feat: delete model * add backend unit tests * feat: display confirm dialog before deleting * log error when error deleting model * feat: display error --- .../src/managers/modelsManager.spec.ts | 192 ++++++++++++++++-- .../backend/src/managers/modelsManager.ts | 47 ++++- packages/backend/src/studio-api-impl.ts | 4 + packages/frontend/src/lib/Modal.svelte | 62 ++++++ .../src/lib/button/ListItemButtonIcon.svelte | 51 +++++ packages/frontend/src/lib/dialog-utils.ts | 32 +++ .../lib/table/model/ModelColumnActions.svelte | 53 +++++ packages/frontend/src/pages/Models.svelte | 4 +- packages/shared/src/StudioAPI.ts | 7 +- packages/shared/src/models/IModelInfo.ts | 1 + 10 files changed, 428 insertions(+), 25 deletions(-) create mode 100644 packages/frontend/src/lib/Modal.svelte create mode 100644 packages/frontend/src/lib/button/ListItemButtonIcon.svelte create mode 100644 packages/frontend/src/lib/dialog-utils.ts create mode 100644 packages/frontend/src/lib/table/model/ModelColumnActions.svelte diff --git a/packages/backend/src/managers/modelsManager.spec.ts b/packages/backend/src/managers/modelsManager.spec.ts index 47f1faf55..83f301309 100644 --- a/packages/backend/src/managers/modelsManager.spec.ts +++ b/packages/backend/src/managers/modelsManager.spec.ts @@ -7,6 +7,27 @@ import type { Webview } from '@podman-desktop/api'; import type { CatalogManager } from './catalogManager'; import type { ModelInfo } from '@shared/src/models/IModelInfo'; +const mocks = vi.hoisted(() => { + return { + showErrorMessageMock: vi.fn(), + }; +}); + +vi.mock('@podman-desktop/api', () => { + return { + fs: { + createFileSystemWatcher: () => ({ + onDidCreate: vi.fn(), + onDidDelete: vi.fn(), + onDidChange: vi.fn(), + }), + }, + window: { + showErrorMessage: mocks.showErrorMessageMock, + }, + }; +}); + beforeEach(() => { vi.resetAllMocks(); }); @@ -34,7 +55,7 @@ function mockFiles(now: Date) { const existsSyncSpy = vi.spyOn(fs, 'existsSync'); existsSyncSpy.mockImplementation((path: string) => { if (process.platform === 'win32') { - expect(path).toBe('\\home\\user\\aistudio\\models'); + expect(path).toBe('C:\\home\\user\\aistudio\\models'); } else { expect(path).toBe('/home/user/aistudio/models'); } @@ -62,7 +83,13 @@ function mockFiles(now: Date) { 
test('getLocalModelsFromDisk should get models in local directory', () => { const now = new Date(); mockFiles(now); - const manager = new ModelsManager('/home/user/aistudio', {} as Webview, {} as CatalogManager); + let appdir: string; + if (process.platform === 'win32') { + appdir = 'C:\\home\\user\\aistudio'; + } else { + appdir = '/home/user/aistudio'; + } + const manager = new ModelsManager(appdir, {} as Webview, {} as CatalogManager); manager.getLocalModelsFromDisk(); expect(manager.getLocalModels()).toEqual([ { @@ -86,11 +113,17 @@ test('getLocalModelsFromDisk should return an empty array if the models folder d vi.spyOn(os, 'homedir').mockReturnValue('/home/user'); const existsSyncSpy = vi.spyOn(fs, 'existsSync'); existsSyncSpy.mockReturnValue(false); - const manager = new ModelsManager('/home/user/aistudio', {} as Webview, {} as CatalogManager); + let appdir: string; + if (process.platform === 'win32') { + appdir = 'C:\\home\\user\\aistudio'; + } else { + appdir = '/home/user/aistudio'; + } + const manager = new ModelsManager(appdir, {} as Webview, {} as CatalogManager); manager.getLocalModelsFromDisk(); expect(manager.getLocalModels()).toEqual([]); if (process.platform === 'win32') { - expect(existsSyncSpy).toHaveBeenCalledWith('\\home\\user\\aistudio\\models'); + expect(existsSyncSpy).toHaveBeenCalledWith('C:\\home\\user\\aistudio\\models'); } else { expect(existsSyncSpy).toHaveBeenCalledWith('/home/user/aistudio/models'); } @@ -100,20 +133,15 @@ test('loadLocalModels should post a message with the message on disk and on cata const now = new Date(); mockFiles(now); - vi.mock('@podman-desktop/api', () => { - return { - fs: { - createFileSystemWatcher: () => ({ - onDidCreate: vi.fn(), - onDidDelete: vi.fn(), - onDidChange: vi.fn(), - }), - }, - }; - }); const postMessageMock = vi.fn(); + let appdir: string; + if (process.platform === 'win32') { + appdir = 'C:\\home\\user\\aistudio'; + } else { + appdir = '/home/user/aistudio'; + } const manager = new ModelsManager( - '/home/user/aistudio', + appdir, { postMessage: postMessageMock, } as unknown as Webview, @@ -144,3 +172,135 @@ test('loadLocalModels should post a message with the message on disk and on cata ], }); }); + +test('deleteLocalModel deletes the model folder', async () => { + let appdir: string; + if (process.platform === 'win32') { + appdir = 'C:\\home\\user\\aistudio'; + } else { + appdir = '/home/user/aistudio'; + } + const now = new Date(); + mockFiles(now); + const rmSpy = vi.spyOn(fs.promises, 'rm'); + rmSpy.mockResolvedValue(); + const postMessageMock = vi.fn(); + const manager = new ModelsManager( + appdir, + { + postMessage: postMessageMock, + } as unknown as Webview, + { + getModels: () => { + return [ + { + id: 'model-id-1', + }, + ] as ModelInfo[]; + }, + } as CatalogManager, + ); + manager.getLocalModelsFromDisk(); + await manager.deleteLocalModel('model-id-1'); + // check that the model's folder is removed from disk + if (process.platform === 'win32') { + expect(rmSpy).toBeCalledWith('C:\\home\\user\\aistudio\\models\\model-id-1', { recursive: true }); + } else { + expect(rmSpy).toBeCalledWith('/home/user/aistudio/models/model-id-1', { recursive: true }); + } + expect(postMessageMock).toHaveBeenCalledTimes(2); + // check that a state is sent with the model being deleted + expect(postMessageMock).toHaveBeenCalledWith({ + id: 'new-local-models-state', + body: [ + { + file: { + creation: now, + file: 'model-id-1-model', + id: 'model-id-1', + size: 32000, + path: path.resolve(dirent[0].path, dirent[0].name, 
'model-id-1-model'), + }, + id: 'model-id-1', + state: 'deleting', + }, + ], + }); + // check that a new state is sent with the model removed + expect(postMessageMock).toHaveBeenCalledWith({ + id: 'new-local-models-state', + body: [], + }); +}); + +test('deleteLocalModel fails to delete the model folder', async () => { + let appdir: string; + if (process.platform === 'win32') { + appdir = 'C:\\home\\user\\aistudio'; + } else { + appdir = '/home/user/aistudio'; + } + const now = new Date(); + mockFiles(now); + const rmSpy = vi.spyOn(fs.promises, 'rm'); + rmSpy.mockRejectedValue(new Error('failed')); + const postMessageMock = vi.fn(); + const manager = new ModelsManager( + appdir, + { + postMessage: postMessageMock, + } as unknown as Webview, + { + getModels: () => { + return [ + { + id: 'model-id-1', + }, + ] as ModelInfo[]; + }, + } as CatalogManager, + ); + manager.getLocalModelsFromDisk(); + await manager.deleteLocalModel('model-id-1'); + // check that the model's folder is removed from disk + if (process.platform === 'win32') { + expect(rmSpy).toBeCalledWith('C:\\home\\user\\aistudio\\models\\model-id-1', { recursive: true }); + } else { + expect(rmSpy).toBeCalledWith('/home/user/aistudio/models/model-id-1', { recursive: true }); + } + expect(postMessageMock).toHaveBeenCalledTimes(2); + // check that a state is sent with the model being deleted + expect(postMessageMock).toHaveBeenCalledWith({ + id: 'new-local-models-state', + body: [ + { + file: { + creation: now, + file: 'model-id-1-model', + id: 'model-id-1', + size: 32000, + path: path.resolve(dirent[0].path, dirent[0].name, 'model-id-1-model'), + }, + id: 'model-id-1', + state: 'deleting', + }, + ], + }); + // check that a new state is sent with the model non removed + expect(postMessageMock).toHaveBeenCalledWith({ + id: 'new-local-models-state', + body: [ + { + file: { + creation: now, + file: 'model-id-1-model', + id: 'model-id-1', + size: 32000, + path: path.resolve(dirent[0].path, dirent[0].name, 'model-id-1-model'), + }, + id: 'model-id-1', + }, + ], + }); + expect(mocks.showErrorMessageMock).toHaveBeenCalledOnce(); +}); diff --git a/packages/backend/src/managers/modelsManager.ts b/packages/backend/src/managers/modelsManager.ts index b2a266222..0c6e6224e 100644 --- a/packages/backend/src/managers/modelsManager.ts +++ b/packages/backend/src/managers/modelsManager.ts @@ -4,10 +4,14 @@ import * as path from 'node:path'; import { type Webview, fs as apiFs } from '@podman-desktop/api'; import { MSG_NEW_LOCAL_MODELS_STATE } from '@shared/Messages'; import type { CatalogManager } from './catalogManager'; +import type { ModelInfo } from '@shared/src/models/IModelInfo'; +import * as podmanDesktopApi from '@podman-desktop/api'; export class ModelsManager { #modelsDir: string; #localModels: Map; + // models being deleted + #deleted: Set; constructor( private appUserDirectory: string, @@ -16,16 +20,13 @@ export class ModelsManager { ) { this.#modelsDir = path.join(this.appUserDirectory, 'models'); this.#localModels = new Map(); + this.#deleted = new Set(); } async loadLocalModels() { const reloadLocalModels = async () => { this.getLocalModelsFromDisk(); - const models = this.getModelsInfo(); - await this.webview.postMessage({ - id: MSG_NEW_LOCAL_MODELS_STATE, - body: models, - }); + await this.sendModelsInfo(); }; const watcher = apiFs.createFileSystemWatcher(this.#modelsDir); watcher.onDidCreate(reloadLocalModels); @@ -39,7 +40,22 @@ export class ModelsManager { return this.catalogManager .getModels() .filter(m => 
this.#localModels.has(m.id)) - .map(m => ({ ...m, file: this.#localModels.get(m.id) })); + .map( + m => + ({ + ...m, + file: this.#localModels.get(m.id), + state: this.#deleted.has(m.id) ? 'deleting' : undefined, + }) as ModelInfo, + ); + } + + async sendModelsInfo() { + const models = this.getModelsInfo(); + await this.webview.postMessage({ + id: MSG_NEW_LOCAL_MODELS_STATE, + body: models, + }); } getLocalModelsFromDisk(): void { @@ -85,7 +101,26 @@ export class ModelsManager { return path.resolve(this.#modelsDir, modelId, info.file); } + getLocalModelFolder(modelId: string): string { + return path.resolve(this.#modelsDir, modelId); + } + getLocalModels(): LocalModelInfo[] { return Array.from(this.#localModels.values()); } + + async deleteLocalModel(modelId: string): Promise { + const modelDir = this.getLocalModelFolder(modelId); + this.#deleted.add(modelId); + await this.sendModelsInfo(); + try { + await fs.promises.rm(modelDir, { recursive: true }); + this.#localModels.delete(modelId); + } catch (err: unknown) { + await podmanDesktopApi.window.showErrorMessage(`Error deleting model ${modelId}. ${String(err)}`); + } finally { + this.#deleted.delete(modelId); + await this.sendModelsInfo(); + } + } } diff --git a/packages/backend/src/studio-api-impl.ts b/packages/backend/src/studio-api-impl.ts index cc340f9f4..e166faa69 100644 --- a/packages/backend/src/studio-api-impl.ts +++ b/packages/backend/src/studio-api-impl.ts @@ -107,4 +107,8 @@ export class StudioApiImpl implements StudioAPI { async getCatalog(): Promise { return this.catalogManager.getCatalog(); } + + async deleteLocalModel(modelId: string): Promise { + await this.modelsManager.deleteLocalModel(modelId); + } } diff --git a/packages/frontend/src/lib/Modal.svelte b/packages/frontend/src/lib/Modal.svelte new file mode 100644 index 000000000..16c169244 --- /dev/null +++ b/packages/frontend/src/lib/Modal.svelte @@ -0,0 +1,62 @@ + + + + + + + + + diff --git a/packages/frontend/src/lib/button/ListItemButtonIcon.svelte b/packages/frontend/src/lib/button/ListItemButtonIcon.svelte new file mode 100644 index 000000000..66e083598 --- /dev/null +++ b/packages/frontend/src/lib/button/ListItemButtonIcon.svelte @@ -0,0 +1,51 @@ + + + diff --git a/packages/frontend/src/lib/dialog-utils.ts b/packages/frontend/src/lib/dialog-utils.ts new file mode 100644 index 000000000..cb97d595d --- /dev/null +++ b/packages/frontend/src/lib/dialog-utils.ts @@ -0,0 +1,32 @@ +/********************************************************************** + * Copyright (C) 2023 Red Hat, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + ***********************************************************************/ + +export function tabWithinParent(e: KeyboardEvent, parent: HTMLDivElement): void { + // trap focus within parent element + const nodes = parent.querySelectorAll('*'); + const tabbable = Array.from(nodes).filter(n => n.tabIndex >= 0); + + let index = tabbable.indexOf(document.activeElement as HTMLElement); + if (index === -1 && e.shiftKey) index = 0; + + index += tabbable.length + (e.shiftKey ? -1 : 1); + index %= tabbable.length; + + tabbable[index].focus(); + e.preventDefault(); +} diff --git a/packages/frontend/src/lib/table/model/ModelColumnActions.svelte b/packages/frontend/src/lib/table/model/ModelColumnActions.svelte new file mode 100644 index 000000000..6c596c721 --- /dev/null +++ b/packages/frontend/src/lib/table/model/ModelColumnActions.svelte @@ -0,0 +1,53 @@ + + + deleteModel()} + title="Delete Model" + enabled={!object.state} +/> + +{#if deleteConfirmVisible} + +
+    Delete a model
+
+    The folder on disk containing the model will be deleted, it contains:
+      • {object.file?.file}
+
+{/if} diff --git a/packages/frontend/src/pages/Models.svelte b/packages/frontend/src/pages/Models.svelte index 4420ec43b..964d97476 100644 --- a/packages/frontend/src/pages/Models.svelte +++ b/packages/frontend/src/pages/Models.svelte @@ -15,7 +15,8 @@ import Card from '/@/lib/Card.svelte'; import { modelsPulling } from '../stores/recipe'; import { onMount } from 'svelte'; import ModelColumnSize from '../lib/table/model/ModelColumnSize.svelte'; - import ModelColumnCreation from '../lib/table/model/ModelColumnCreation.svelte'; +import ModelColumnCreation from '../lib/table/model/ModelColumnCreation.svelte'; +import ModelColumnActions from '../lib/table/model/ModelColumnActions.svelte'; const columns: Column[] = [ new Column('Name', { width: '3fr', renderer: ModelColumnName }), @@ -25,6 +26,7 @@ const columns: Column[] = [ new Column('Registry', { width: '2fr', renderer: ModelColumnRegistry }), new Column('Popularity', { width: '1fr', renderer: ModelColumnPopularity }), new Column('License', { width: '2fr', renderer: ModelColumnLicense }), + new Column('Actions', { align: 'right', width: '1fr', renderer: ModelColumnActions }), ]; const row = new Row({}); diff --git a/packages/shared/src/StudioAPI.ts b/packages/shared/src/StudioAPI.ts index 5ea1990eb..98928932b 100644 --- a/packages/shared/src/StudioAPI.ts +++ b/packages/shared/src/StudioAPI.ts @@ -12,10 +12,13 @@ export abstract class StudioAPI { abstract pullApplication(recipeId: string): Promise; abstract openURL(url: string): Promise; /** - * Get the information of models saved locally into the extension's storage directory + * Get the information of models saved locally into the user's directory */ abstract getLocalModels(): Promise; - + /** + * Delete the folder containing the model from local storage + */ + abstract deleteLocalModel(modelId: string): Promise; abstract startPlayground(modelId: string): Promise; abstract stopPlayground(modelId: string): Promise; abstract askPlayground(modelId: string, prompt: string): Promise; diff --git a/packages/shared/src/models/IModelInfo.ts b/packages/shared/src/models/IModelInfo.ts index 422078fce..ac5eea068 100644 --- a/packages/shared/src/models/IModelInfo.ts +++ b/packages/shared/src/models/IModelInfo.ts @@ -10,4 +10,5 @@ export interface ModelInfo { license: string; url: string; file?: LocalModelInfo; + state?: 'deleting'; } From a431851278e601f59a6bcefb159e8dae24ddaffc Mon Sep 17 00:00:00 2001 From: Luca Stocchi <49404737+lstocchi@users.noreply.github.com> Date: Fri, 26 Jan 2024 18:12:14 +0100 Subject: [PATCH 7/8] fix: delete containers outside the pod (#160) Signed-off-by: lstocchi --- .../src/managers/applicationManager.spec.ts | 65 +++++++++++++++++++ .../src/managers/applicationManager.ts | 63 ++++++++++-------- 2 files changed, 101 insertions(+), 27 deletions(-) diff --git a/packages/backend/src/managers/applicationManager.spec.ts b/packages/backend/src/managers/applicationManager.spec.ts index 0491ffd41..dca3a71d2 100644 --- a/packages/backend/src/managers/applicationManager.spec.ts +++ b/packages/backend/src/managers/applicationManager.spec.ts @@ -26,6 +26,7 @@ const mocks = vi.hoisted(() => { replicatePodmanContainerMock: vi.fn(), startContainerMock: vi.fn(), startPod: vi.fn(), + deleteContainerMock: vi.fn(), }; }); vi.mock('../models/AIConfig', () => ({ @@ -41,6 +42,7 @@ vi.mock('@podman-desktop/api', () => ({ replicatePodmanContainer: mocks.replicatePodmanContainerMock, startContainer: mocks.startContainerMock, startPod: mocks.startPod, + deleteContainer: mocks.deleteContainerMock, 
}, })); let setTaskMock: MockInstance; @@ -731,6 +733,20 @@ describe('createApplicationPod', () => { name: 'Creating application', }); }); + test('throw if createAndAddContainersToPod fails', async () => { + const pod: PodInfo = { + engineId: 'engine', + Id: 'id', + }; + vi.spyOn(manager, 'createPod').mockResolvedValue(pod); + vi.spyOn(manager, 'createAndAddContainersToPod').mockRejectedValue('error'); + await expect(() => manager.createApplicationPod(images, 'path', taskUtils)).rejects.toThrowError('error'); + expect(setTaskMock).toBeCalledWith({ + id: 'id', + state: 'error', + name: 'Creating application', + }); + }); }); describe('doDownloadModelWrapper', () => { @@ -829,3 +845,52 @@ describe('runApplication', () => { }); }); }); + +describe('createAndAddContainersToPod', () => { + const manager = new ApplicationManager( + '/home/user/aistudio', + {} as unknown as GitManager, + {} as unknown as RecipeStatusRegistry, + {} as unknown as ModelsManager, + ); + const pod: PodInfo = { + engineId: 'engine', + Id: 'id', + }; + const imageInfo1: ImageInfo = { + id: 'id', + appName: 'appName', + modelService: false, + ports: ['8080'], + }; + test('check that after the creation and copy inside the pod, the container outside the pod is actually deleted', async () => { + mocks.createContainerMock.mockResolvedValue({ + id: 'container-1', + }); + vi.spyOn(manager, 'getRandomName').mockReturnValue('name'); + await manager.createAndAddContainersToPod(pod, [imageInfo1], 'path'); + expect(mocks.createContainerMock).toBeCalledWith('engine', { + Image: 'id', + Detach: true, + HostConfig: { + AutoRemove: true, + }, + Env: [], + start: false, + }); + expect(mocks.replicatePodmanContainerMock).toBeCalledWith( + { + id: 'container-1', + engineId: 'engine', + }, + { + engineId: 'engine', + }, + { + pod: 'id', + name: 'name', + }, + ); + expect(mocks.deleteContainerMock).toBeCalledWith('engine', 'container-1'); + }); +}); diff --git a/packages/backend/src/managers/applicationManager.ts b/packages/backend/src/managers/applicationManager.ts index d21ff2d79..be48ebc7f 100644 --- a/packages/backend/src/managers/applicationManager.ts +++ b/packages/backend/src/managers/applicationManager.ts @@ -171,7 +171,18 @@ export class ApplicationManager { name: `Creating application`, }); - const attachedContainers = await this.createAndAddContainersToPod(pod, images, modelPath); + let attachedContainers: ContainerAttachedInfo[]; + try { + attachedContainers = await this.createAndAddContainersToPod(pod, images, modelPath); + } catch (e) { + console.error(`error when creating pod ${pod.Id}`); + taskUtil.setTask({ + id: pod.Id, + state: 'error', + name: 'Creating application', + }); + throw e; + } taskUtil.setTask({ id: pod.Id, @@ -219,35 +230,33 @@ export class ApplicationManager { envs = [`MODEL_ENDPOINT=${endPoint}`]; } } - const createdContainer = await containerEngine - .createContainer(pod.engineId, { - Image: image.id, - Detach: true, - HostConfig: hostConfig, - Env: envs, - start: false, - }) - .catch((e: unknown) => console.error(e)); + const createdContainer = await containerEngine.createContainer(pod.engineId, { + Image: image.id, + Detach: true, + HostConfig: hostConfig, + Env: envs, + start: false, + }); // now, for each container, put it in the pod if (createdContainer) { - try { - const podifiedName = this.getRandomName(`${image.appName}-podified`); - await containerEngine.replicatePodmanContainer( - { - id: createdContainer.id, - engineId: pod.engineId, - }, - { engineId: pod.engineId }, - { pod: pod.Id, name: 
podifiedName }, - ); - containers.push({ - name: podifiedName, - endPoint, - }); - } catch (error) { - console.error(error); - } + const podifiedName = this.getRandomName(`${image.appName}-podified`); + await containerEngine.replicatePodmanContainer( + { + id: createdContainer.id, + engineId: pod.engineId, + }, + { engineId: pod.engineId }, + { pod: pod.Id, name: podifiedName }, + ); + containers.push({ + name: podifiedName, + endPoint, + }); + // remove the external container + await containerEngine.deleteContainer(pod.engineId, createdContainer.id); + } else { + throw new Error(`failed at creating container for image ${image.id}`); } }), ); From 6ba8fbd1c5c1889a59feb43f6543add4a149f608 Mon Sep 17 00:00:00 2001 From: Luca Stocchi <49404737+lstocchi@users.noreply.github.com> Date: Mon, 29 Jan 2024 14:07:04 +0100 Subject: [PATCH 8/8] chore: adding summarizer app (#164) * chore: adding summarizer app Signed-off-by: lstocchi * fix: update summarizer readme Signed-off-by: lstocchi --------- Signed-off-by: lstocchi --- packages/backend/src/ai.json | 39 +++++++++++++++--------------------- 1 file changed, 16 insertions(+), 23 deletions(-) diff --git a/packages/backend/src/ai.json b/packages/backend/src/ai.json index 779726806..726f998c6 100644 --- a/packages/backend/src/ai.json +++ b/packages/backend/src/ai.json @@ -12,9 +12,22 @@ "config": "chatbot/ai-studio.yaml", "readme": "# Locallm\n\nThis repo contains artifacts that can be used to build and run LLM (Large Language Model) services locally on your Mac using podman. These containerized LLM services can be used to help developers quickly prototype new LLM based applications, without the need for relying on any other externally hosted services. Since they are already containerized, it also helps developers move from their prototype to production quicker. \n\n## Current Locallm Services: \n\n* [Chatbot](#chatbot)\n* [Text Summarization](#text-summarization)\n* [Fine-tuning](#fine-tuning)\n\n### Chatbot\n\nA simple chatbot using the gradio UI. Learn how to build and run this model service here: [Chatbot](/chatbot/).\n\n### Text Summarization\n\nAn LLM app that can summarize arbitrarily long text inputs. Learn how to build and run this model service here: [Text Summarization](/summarizer/).\n\n### Fine Tuning \n\nThis application allows a user to select a model and a data set they'd like to fine-tune that model on. Once the application finishes, it outputs a new fine-tuned model for the user to apply to other LLM services. Learn how to build and run this model training job here: [Fine-tuning](/finetune/).\n\n## Architecture\n![](https://raw.githubusercontent.com/MichaelClifford/locallm/main/assets/arch.jpg)\n\nThe diagram above indicates the general architecture for each of the individual model services contained in this repo. The core code available here is the \"LLM Task Service\" and the \"API Server\", bundled together under `model_services`. With an appropriately chosen model downloaded onto your host,`model_services/builds` contains the Containerfiles required to build an ARM or an x86 (with CUDA) image depending on your need. These model services are intended to be light-weight and run with smaller hardware footprints (given the Locallm name), but they can be run on any hardware that supports containers and scaled up if needed.\n\nWe also provide demo \"AI Applications\" under `ai_applications` for each model service to provide an example of how a developers could interact with the model service for their own needs. 
", "models": [ - "llama-2-7b-chat.Q5_K_S", - "albedobase-xl-1.3", - "sdxl-turbo" + "llama-2-7b-chat.Q5_K_S" + ] + }, + { + "id": "summarizer", + "description" : "Summarizer application", + "name" : "Summarizer", + "repository": "https://github.com/redhat-et/locallm", + "icon": "natural-language-processing", + "categories": [ + "natural-language-processing" + ], + "config": "summarizer/ai-studio.yaml", + "readme": "# Summarizer\n\nThis model service is intended be be used for text summarization tasks. This service can ingest an arbitrarily long text input. If the input length is less than the models maximum context window it will summarize the input directly. If the input is longer than the maximum context window, the input will be divided into appropriately sized chunks. Each chunk will be summarized and a final \"summary of summaries\" will be the services final output. ", + "models": [ + "llama-2-7b-chat.Q5_K_S" ] } ], @@ -28,26 +41,6 @@ "popularity": 3, "license": "?", "url": "https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_S.gguf" - }, - { - "id": "albedobase-xl-1.3", - "name": "AlbedoBase XL 1.3", - "description": "Stable Diffusion XL has 6.6 billion parameters, which is about 6.6 times more than the SD v1.5 version. I believe that this is not just a number, but a number that can lead to a significant improvement in performance. It has been a while since we realized that the overall performance of SD v1.5 has improved beyond imagination thanks to the explosive contributions of our community. Therefore, I am working on completing this AlbedoBase XL model in order to optimally reproduce the performance improvement that occurred in v1.5 in this XL version as well. My goal is to directly test the performance of all Checkpoints and LoRAs that are publicly uploaded to Civitai, and merge only the resources that are judged to be optimal after passing through several filters. This will surpass the performance of image-generating AI of companies such as Midjourney. As of now, AlbedoBase XL v0.4 has merged exactly 55 selected checkpoints and 138 LoRAs.", - "hw": "CPU", - "registry": "Civital", - "popularity": 3, - "license": "openrail++", - "url": "" - }, - { - "id": "sdxl-turbo", - "name": "SDXL Turbo", - "description": "SDXL Turbo achieves state-of-the-art performance with a new distillation technology, enabling single-step image generation with unprecedented quality, reducing the required step count from 50 to just one.", - "hw": "CPU", - "registry": "Hugging Face", - "popularity": 3, - "license": "sai-c-community", - "url": "" } ], "categories": [