From 11c41e7381750b828841bc3d92d187d91252329d Mon Sep 17 00:00:00 2001
From: Enrico Ros
Date: Fri, 7 Jun 2024 14:18:01 -0700
Subject: [PATCH] Function call: increase debug verbosity

---
 src/modules/llms/server/openai/openai.router.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/modules/llms/server/openai/openai.router.ts b/src/modules/llms/server/openai/openai.router.ts
index 6a3a8f659..84ff7f30c 100644
--- a/src/modules/llms/server/openai/openai.router.ts
+++ b/src/modules/llms/server/openai/openai.router.ts
@@ -270,7 +270,7 @@ export const llmOpenAIRouter = createTRPCRouter({
     .output(llmsChatGenerateWithFunctionsOutputSchema)
     .mutation(async ({ input }) => {
 
-      const { access, model, history, functions, forceFunctionName } = input;
+      const { access, model, history, functions, forceFunctionName, context } = input;
       const isFunctionsCall = !!functions && functions.length > 0;
 
       const completionsBody = openAIChatCompletionPayload(access.dialect, model, history, isFunctionsCall ? functions : null, forceFunctionName ?? null, 1, false);
@@ -280,7 +280,7 @@ export const llmOpenAIRouter = createTRPCRouter({
 
       // expect a single output
       if (wireCompletions?.choices?.length !== 1) {
-        console.error(`[POST] llmOpenAI.chatGenerateWithFunctions: ${access.dialect}: unexpected output${forceFunctionName ? ` (fn: ${forceFunctionName})` : ''}:`, wireCompletions?.choices?.length);
+        console.error(`[POST] llmOpenAI.chatGenerateWithFunctions: ${access.dialect}: ${context?.name || 'no context'}: unexpected output${forceFunctionName ? ` (fn: ${forceFunctionName})` : ''}:`, model.id, wireCompletions?.choices);
         throw new TRPCError({
           code: 'UNPROCESSABLE_CONTENT',
           message: `[OpenAI Issue] Expected 1 completion, got ${wireCompletions?.choices?.length}`,