
Commit

Merge branch 'refs/heads/main-stable'
enricoros committed Jun 7, 2024
2 parents 0807744 + 1744b5b commit 5066336
Showing 3 changed files with 12 additions and 7 deletions.
5 changes: 3 additions & 2 deletions src/modules/aifn/autosuggestions/autoSuggestions.ts
@@ -83,7 +83,7 @@ export function autoSuggestions(conversationId: string, assistantMessageId: stri
 
   // Follow-up: Auto-Diagrams
   if (suggestDiagrams) {
-    void llmChatGenerateOrThrow(funcLLMId, [
+    llmChatGenerateOrThrow(funcLLMId, [
       { role: 'system', content: systemMessage.text },
       { role: 'user', content: userMessage.text },
       { role: 'assistant', content: assistantMessageText },
@@ -110,7 +110,8 @@ export function autoSuggestions(conversationId: string, assistantMessageId: stri
         }
       }
     }).catch(err => {
-      console.error('autoSuggestions::diagram:', err);
+      // Likely the model did not support function calling
+      // console.log('autoSuggestions: diagram error:', err);
     });
   }

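Taken together with the llm.client.ts change below, the effect is that a forced-function request against a chat-only model now rejects early, and the auto-diagram follow-up silently skips it instead of logging an error. A minimal self-contained sketch of that pattern, with illustrative names only (generateWithFunction, suggestDiagram and the interface strings are not the project's actual API):

// Sketch only: illustrative types and names, not big-AGI's real API.
type ModelInterface = 'chat' | 'function-calling';

interface SketchModel {
  id: string;
  interfaces: ModelInterface[];
}

async function generateWithFunction(model: SketchModel, forceFunctionName: string): Promise<string> {
  // Early capability guard, analogous to the new check in llmChatGenerateOrThrow
  if (!model.interfaces.includes('function-calling'))
    throw new Error(`Model ${model.id} does not support function calling`);
  return `{"function": "${forceFunctionName}", "arguments": {}}`; // placeholder response
}

function suggestDiagram(model: SketchModel): void {
  // Fire-and-forget, as in autoSuggestions: a rejection is an expected outcome
  // for chat-only models, so it is swallowed instead of logged as an error.
  generateWithFunction(model, 'draw_mermaid_diagram')
    .then(json => console.log('diagram suggestion:', json))
    .catch(() => { /* model likely lacks function calling - ignore */ });
}

suggestDiagram({ id: 'chat-only-model', interfaces: ['chat'] });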
6 changes: 5 additions & 1 deletion src/modules/llms/llm.client.ts
@@ -5,7 +5,7 @@ import { hasGoogleAnalytics } from '~/common/components/GoogleAnalytics';
 import type { ModelDescriptionSchema } from './server/llm.server.types';
 import type { OpenAIWire } from './server/openai/openai.wiretypes';
 import type { StreamingClientUpdate } from './vendors/unifiedStreamingClient';
-import { DLLM, DLLMId, DModelSource, DModelSourceId, LLM_IF_OAI_Chat, useModelsStore } from './store-llms';
+import { DLLM, DLLMId, DModelSource, DModelSourceId, LLM_IF_OAI_Chat, LLM_IF_OAI_Fn, useModelsStore } from './store-llms';
 import { FALLBACK_LLM_TEMPERATURE } from './vendors/openai/openai.vendor';
 import { findAccessForSourceOrThrow, findVendorForLlmOrThrow } from './vendors/vendors.registry';
 
@@ -129,6 +129,10 @@ export async function llmChatGenerateOrThrow<TSourceSetup = unknown, TAccess = u
   // id to DLLM and vendor
   const { llm, vendor } = findVendorForLlmOrThrow<TSourceSetup, TAccess, TLLMOptions>(llmId);
 
+  // if the model does not support function calling and we're trying to force a function, throw
+  if (forceFunctionName && !llm.interfaces.includes(LLM_IF_OAI_Fn))
+    throw new Error(`Model ${llmId} does not support function calling`);
+
   // FIXME: relax the forced cast
   const options = llm.options as TLLMOptions;
 
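Callers that do force a function can now see a synchronous, descriptive rejection instead of a provider-specific failure later. A hedged sketch of how a caller might branch on it (the helper and the error-message check below are illustrative, not code from the repository):

// Illustrative only: distinguish the capability error from real failures.
async function tryForcedFunctionCall(
  generate: (forceFunctionName: string) => Promise<string>,
  functionName: string,
): Promise<string | null> {
  try {
    return await generate(functionName);
  } catch (err: unknown) {
    // Matches the message thrown by the new guard; treat it as "not supported".
    if (err instanceof Error && err.message.includes('does not support function calling'))
      return null; // degrade gracefully for chat-only models
    throw err; // anything else is a genuine error
  }
}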
8 changes: 4 additions & 4 deletions src/modules/llms/server/openai/models.data.ts
@@ -386,7 +386,7 @@ const _knownMistralChatModels: ManualMappings = [
     label: 'Codestral (2405)',
     description: 'Designed and optimized for code generation tasks.',
     contextWindow: 32768,
-    interfaces: [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn],
+    interfaces: [LLM_IF_OAI_Chat],
     pricing: { chatIn: 1, chatOut: 3 },
   },
   {
@@ -397,7 +397,7 @@ const _knownMistralChatModels: ManualMappings = [
     // copied
     description: 'Designed and optimized for code generation tasks.',
     contextWindow: 32768,
-    interfaces: [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn],
+    interfaces: [LLM_IF_OAI_Chat],
     pricing: { chatIn: 1, chatOut: 3 },
   },
 
@@ -430,7 +430,7 @@ const _knownMistralChatModels: ManualMappings = [
     label: 'Open Mixtral 8x22B (2404)',
     description: 'Mixtral 8x22B model',
     contextWindow: 65536,
-    interfaces: [LLM_IF_OAI_Chat],
+    interfaces: [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn],
     pricing: { chatIn: 2, chatOut: 6 },
   },
   {
@@ -441,7 +441,7 @@ const _knownMistralChatModels: ManualMappings = [
     // copied
     description: 'Mixtral 8x22B model',
     contextWindow: 65536,
-    interfaces: [LLM_IF_OAI_Chat],
+    interfaces: [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn],
     pricing: { chatIn: 2, chatOut: 6 },
   },
   // Medium (Deprecated)
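These interfaces arrays are static capability metadata per model; the runtime guard added in llm.client.ts consults them. A small sketch of selecting a function-calling-capable model from such metadata (the constant values, entries and helper are illustrative stand-ins, not the actual models.data.ts shapes):

// Sketch with stand-in values: the real flags live in store-llms / models.data.ts.
const LLM_IF_OAI_Chat = 'oai-chat';
const LLM_IF_OAI_Fn = 'oai-fn';

interface KnownModel {
  label: string;
  interfaces: string[];
}

const knownModels: KnownModel[] = [
  { label: 'Codestral (2405)', interfaces: [LLM_IF_OAI_Chat] },
  { label: 'Open Mixtral 8x22B (2404)', interfaces: [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn] },
];

// Pick the first model that declares function-calling support.
function pickFunctionCapableModel(models: KnownModel[]): KnownModel | undefined {
  return models.find(m => m.interfaces.includes(LLM_IF_OAI_Fn));
}

console.log(pickFunctionCapableModel(knownModels)?.label); // 'Open Mixtral 8x22B (2404)'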
