diff --git a/backend/src/langchain/langchain.module.ts b/backend/src/langchain/langchain.module.ts
index 2d42062a..6b1bece1 100644
--- a/backend/src/langchain/langchain.module.ts
+++ b/backend/src/langchain/langchain.module.ts
@@ -5,7 +5,7 @@ import { BaseChatModel } from "@langchain/core/language_models/chat_models";
 
 type ModelList = {
 	[key: string]: string[];
-}
+};
 
 const modelList: ModelList = {
 	ollama: [
@@ -18,22 +18,18 @@ const modelList: ModelList = {
 		"starling-lm",
 		"solar",
 	],
-	openai: [
-		"gpt-3.5-turbo",
-		"gpt-4o-mini"
-	]
-}
+	openai: ["gpt-3.5-turbo", "gpt-4o-mini"],
+};
 
 const chatModelFactory = {
 	provide: "ChatModel",
 	useFactory: () => {
-		const modelType = process.env.YORKIE_INTELLIGENCE;
-		try{
+		try {
 			const [provider, model] = modelType.split(":", 2);
-			let chatModel: BaseChatModel | ChatOllama
+			let chatModel: BaseChatModel | ChatOllama;
 
-			if (modelList[provider] && modelList[provider].includes(model)){
+			if (modelList[provider] && modelList[provider].includes(model)) {
 				if (provider === "ollama") {
 					chatModel = new ChatOllama({
 						model: model,
@@ -52,7 +48,6 @@ const chatModelFactory = {
 
 		} catch {
 			throw new Error(`${modelType} is not found. please check your model name`);
 		}
-
 	},
 };
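
For reference, the factory resolves its chat model from the `YORKIE_INTELLIGENCE` environment variable in the form `<provider>:<model>` and checks the pair against `modelList`. The sketch below isolates just that lookup; it assumes values such as `"openai:gpt-4o-mini"`, and the `resolveModel` helper plus the trimmed-down allow-list are illustrative only, not part of the module.

```ts
// Illustrative sketch (not part of the module): how the provider/model pair is
// parsed and validated. The allow-list mirrors only the entries visible in the diff.
const allowList: { [key: string]: string[] } = {
	ollama: ["starling-lm", "solar"],
	openai: ["gpt-3.5-turbo", "gpt-4o-mini"],
};

// e.g. resolveModel("openai:gpt-4o-mini") -> { provider: "openai", model: "gpt-4o-mini" }
function resolveModel(modelType: string): { provider: string; model: string } {
	const [provider, model] = modelType.split(":", 2);
	if (allowList[provider] && allowList[provider].includes(model)) {
		return { provider, model };
	}
	// Mirrors the error the factory throws when the pair is not recognized.
	throw new Error(`${modelType} is not found. please check your model name`);
}
```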