diff --git a/packages/backend/src/utils/inferenceUtils.spec.ts b/packages/backend/src/utils/inferenceUtils.spec.ts
index 71784756f..df7cb601b 100644
--- a/packages/backend/src/utils/inferenceUtils.spec.ts
+++ b/packages/backend/src/utils/inferenceUtils.spec.ts
@@ -91,8 +91,8 @@ describe('parseInferenceType', () => {
     expect(parseInferenceType(undefined)).toBe(InferenceType.NONE);
   });
 
-  test('llamacpp should return the proper InferenceType.LLAMA_CPP', () => {
-    expect(parseInferenceType('llamacpp')).toBe(InferenceType.LLAMA_CPP);
+  test('llama-cpp should return the proper InferenceType.LLAMA_CPP', () => {
+    expect(parseInferenceType('llama-cpp')).toBe(InferenceType.LLAMA_CPP);
   });
 });
 
@@ -115,7 +115,7 @@ describe('getInferenceType', () => {
     expect(
       getInferenceType([
         {
-          backend: 'llamacpp',
+          backend: 'llama-cpp',
         } as unknown as ModelInfo,
       ]),
     ).toBe(InferenceType.LLAMA_CPP);
@@ -125,10 +125,10 @@
     expect(
       getInferenceType([
         {
-          backend: 'llamacpp',
+          backend: 'llama-cpp',
         },
         {
-          backend: 'llamacpp',
+          backend: 'llama-cpp',
         },
       ] as unknown as ModelInfo[]),
     ).toBe(InferenceType.LLAMA_CPP);
@@ -138,10 +138,10 @@
     expect(
       getInferenceType([
         {
-          backend: 'llamacpp',
+          backend: 'llama-cpp',
         },
         {
-          backend: 'whispercpp',
+          backend: 'whisper-cpp',
         },
       ] as unknown as ModelInfo[]),
     ).toBe(InferenceType.NONE);
diff --git a/packages/shared/src/models/IInference.ts b/packages/shared/src/models/IInference.ts
index b872affc8..a2fec45bd 100644
--- a/packages/shared/src/models/IInference.ts
+++ b/packages/shared/src/models/IInference.ts
@@ -18,8 +18,8 @@
 import type { ModelInfo } from './IModelInfo';
 
 export enum InferenceType {
-  LLAMA_CPP = 'llamacpp',
-  WHISPER_CPP = 'whispercpp',
+  LLAMA_CPP = 'llama-cpp',
+  WHISPER_CPP = 'whisper-cpp',
   NONE = 'none',
 }
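
For context, a minimal TypeScript sketch of how parseInferenceType and getInferenceType could satisfy the updated tests once the enum carries the hyphenated string values. This is an illustration only: the real implementations live in packages/backend/src/utils/inferenceUtils.ts and may differ, and the ModelInfoLike interface below is a hypothetical stand-in for the shared ModelInfo type.

// Sketch, not the extension's actual code: same enum values as the diff above.
enum InferenceType {
  LLAMA_CPP = 'llama-cpp',
  WHISPER_CPP = 'whisper-cpp',
  NONE = 'none',
}

// Hypothetical stand-in for the shared ModelInfo type; only the field
// exercised by the tests is modeled here.
interface ModelInfoLike {
  backend?: string;
}

// Map an arbitrary backend string onto the enum, falling back to NONE.
// Because the enum values are the wire strings themselves, a membership
// check suffices; renaming the values to 'llama-cpp'/'whisper-cpp' is
// exactly what makes parseInferenceType('llama-cpp') resolve to LLAMA_CPP.
function parseInferenceType(type: string | undefined): InferenceType {
  if (!type) return InferenceType.NONE;
  return (Object.values(InferenceType) as string[]).includes(type)
    ? (type as InferenceType)
    : InferenceType.NONE;
}

// Return the backend shared by every model, or NONE when the models
// disagree (matching the mixed llama-cpp/whisper-cpp test case).
function getInferenceType(models: ModelInfoLike[]): InferenceType {
  const first = models[0]?.backend;
  if (first === undefined || !models.every(m => m.backend === first)) {
    return InferenceType.NONE;
  }
  return parseInferenceType(first);
}

console.log(parseInferenceType('llama-cpp')); // 'llama-cpp' (LLAMA_CPP)
console.log(parseInferenceType('llamacpp')); // 'none' -- old spelling no longer matches
console.log(parseInferenceType(undefined)); // 'none'

One consequence of the rename worth noting: any stored configuration or catalog entry that still says 'llamacpp' will now parse to NONE, which is why the spec file's test names and fixtures are updated in lockstep with the enum.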