From 5b5940ce32540234efe0fec0015dd798ddd0a75e Mon Sep 17 00:00:00 2001
From: axel7083 <42176370+axel7083@users.noreply.github.com>
Date: Tue, 11 Jun 2024 17:31:02 +0200
Subject: [PATCH 1/2] fix: enum inference type

Signed-off-by: axel7083 <42176370+axel7083@users.noreply.github.com>
---
 packages/shared/src/models/IInference.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/shared/src/models/IInference.ts b/packages/shared/src/models/IInference.ts
index b872affc8..a2fec45bd 100644
--- a/packages/shared/src/models/IInference.ts
+++ b/packages/shared/src/models/IInference.ts
@@ -18,8 +18,8 @@
 import type { ModelInfo } from './IModelInfo';
 
 export enum InferenceType {
-  LLAMA_CPP = 'llamacpp',
-  WHISPER_CPP = 'whispercpp',
+  LLAMA_CPP = 'llama-cpp',
+  WHISPER_CPP = 'whisper-cpp',
   NONE = 'none',
 }
 

From e588cbd2c79d078a2989fa655e8070575a4004f6 Mon Sep 17 00:00:00 2001
From: axel7083 <42176370+axel7083@users.noreply.github.com>
Date: Tue, 11 Jun 2024 17:47:09 +0200
Subject: [PATCH 2/2] fix: inference utils tests

Signed-off-by: axel7083 <42176370+axel7083@users.noreply.github.com>
---
 packages/backend/src/utils/inferenceUtils.spec.ts | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/packages/backend/src/utils/inferenceUtils.spec.ts b/packages/backend/src/utils/inferenceUtils.spec.ts
index 71784756f..df7cb601b 100644
--- a/packages/backend/src/utils/inferenceUtils.spec.ts
+++ b/packages/backend/src/utils/inferenceUtils.spec.ts
@@ -91,8 +91,8 @@ describe('parseInferenceType', () => {
     expect(parseInferenceType(undefined)).toBe(InferenceType.NONE);
   });
 
-  test('llamacpp should return the proper InferenceType.LLAMA_CPP', () => {
-    expect(parseInferenceType('llamacpp')).toBe(InferenceType.LLAMA_CPP);
+  test('llama-cpp should return the proper InferenceType.LLAMA_CPP', () => {
+    expect(parseInferenceType('llama-cpp')).toBe(InferenceType.LLAMA_CPP);
   });
 });
 
@@ -115,7 +115,7 @@ describe('getInferenceType', () => {
     expect(
       getInferenceType([
         {
-          backend: 'llamacpp',
+          backend: 'llama-cpp',
         } as unknown as ModelInfo,
       ]),
     ).toBe(InferenceType.LLAMA_CPP);
@@ -125,10 +125,10 @@
     expect(
       getInferenceType([
         {
-          backend: 'llamacpp',
+          backend: 'llama-cpp',
         },
         {
-          backend: 'llamacpp',
+          backend: 'llama-cpp',
         },
       ] as unknown as ModelInfo[]),
     ).toBe(InferenceType.LLAMA_CPP);
@@ -138,10 +138,10 @@
     expect(
       getInferenceType([
         {
-          backend: 'llamacpp',
+          backend: 'llama-cpp',
        },
        {
-          backend: 'whispercpp',
+          backend: 'whisper-cpp',
        },
      ] as unknown as ModelInfo[]),
    ).toBe(InferenceType.NONE);
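
Note (not part of the patches): the first patch renames the canonical InferenceType string values from 'llamacpp'/'whispercpp' to 'llama-cpp'/'whisper-cpp', and the second updates the inferenceUtils tests to expect the new values. The sketch below illustrates what the parseInferenceType and getInferenceType helpers exercised by inferenceUtils.spec.ts might look like; it is inferred only from the tests shown above, not from the actual packages/backend/src/utils/inferenceUtils.ts. The local InferenceType enum copy and the minimal ModelInfo stand-in (with an assumed optional string field `backend`) are there only to keep the sketch self-contained.

// Sketch only; real types live in packages/shared/src/models and may differ.
export enum InferenceType {
  LLAMA_CPP = 'llama-cpp',
  WHISPER_CPP = 'whisper-cpp',
  NONE = 'none',
}

// Minimal stand-in: only the field used here; the real ModelInfo has more.
interface ModelInfo {
  backend?: string;
}

// Map a raw backend string to a known InferenceType; unknown or missing
// values fall back to InferenceType.NONE (matches the first tests above).
export function parseInferenceType(backend: string | undefined): InferenceType {
  if (!backend) return InferenceType.NONE;
  return (Object.values(InferenceType) as string[]).includes(backend)
    ? (backend as InferenceType)
    : InferenceType.NONE;
}

// Derive one InferenceType for a group of models: every model must use the
// same backend, otherwise InferenceType.NONE is returned (as in the mixed
// llama-cpp/whisper-cpp test above).
export function getInferenceType(models: ModelInfo[]): InferenceType {
  if (models.length === 0) return InferenceType.NONE;
  const first = parseInferenceType(models[0].backend);
  return models.every((model) => parseInferenceType(model.backend) === first)
    ? first
    : InferenceType.NONE;
}

Whatever the real implementation looks like, the behavioral consequence of the rename is the same: backend values stored with the old 'llamacpp'/'whispercpp' spellings no longer match LLAMA_CPP/WHISPER_CPP and resolve to InferenceType.NONE.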