0);
const [model, setModel] = useState(DEFAULT_MODEL);
+ const [provider, setProvider] = useState(DEFAULT_PROVIDER);
const { showChat } = useStore(chatStore);
@@ -182,7 +183,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
* manually reset the input and we'd have to manually pass in file attachments. However, those
* aren't relevant here.
*/
- append({ role: 'user', content: `[Model: ${model}]\n\n${diff}\n\n${_input}` });
+ append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider}]\n\n${diff}\n\n${_input}` });
/**
* After sending a new message we reset all modifications since the model
@@ -190,7 +191,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
*/
workbenchStore.resetAllFileModifications();
} else {
- append({ role: 'user', content: `[Model: ${model}]\n\n${_input}` });
+ append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider}]\n\n${_input}` });
}
setInput('');
@@ -215,6 +216,8 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
sendMessage={sendMessage}
model={model}
setModel={setModel}
+ provider={provider}
+ setProvider={setProvider}
messageRef={messageRef}
scrollRef={scrollRef}
handleInputChange={handleInputChange}
diff --git a/app/lib/.server/llm/stream-text.ts b/app/lib/.server/llm/stream-text.ts
index de3d5bfa8..70bb3a917 100644
--- a/app/lib/.server/llm/stream-text.ts
+++ b/app/lib/.server/llm/stream-text.ts
@@ -24,42 +24,51 @@ export type Messages = Message[];
export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
-function extractModelFromMessage(message: Message): { model: string; content: string } {
+function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
const modelRegex = /^\[Model: (.*?)\]\n\n/;
- const match = message.content.match(modelRegex);
+ const providerRegex = /\[Provider: (.*?)\]\n\n/;
- if (match) {
- const model = match[1];
- const content = message.content.replace(modelRegex, '');
- return { model, content };
- }
+ // Extract model
+ const modelMatch = message.content.match(modelRegex);
+ const model = modelMatch ? modelMatch[1] : DEFAULT_MODEL;
- // Default model if not specified
- return { model: DEFAULT_MODEL, content: message.content };
+ // Extract provider
+ const providerMatch = message.content.match(providerRegex);
+ const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER;
+
+ // Remove model and provider lines from content
+ const cleanedContent = message.content
+ .replace(modelRegex, '')
+ .replace(providerRegex, '')
+ .trim();
+
+ return { model, provider, content: cleanedContent };
}
export function streamText(messages: Messages, env: Env, options?: StreamingOptions) {
let currentModel = DEFAULT_MODEL;
+ let currentProvider = DEFAULT_PROVIDER;
+
const processedMessages = messages.map((message) => {
if (message.role === 'user') {
- const { model, content } = extractModelFromMessage(message);
- if (model && MODEL_LIST.find((m) => m.name === model)) {
- currentModel = model; // Update the current model
+ const { model, provider, content } = extractPropertiesFromMessage(message);
+
+ // Adopt the requested provider only alongside a recognized model, so the pair passed to getModel() stays consistent.
+ if (MODEL_LIST.some((m) => m.name === model)) {
+ currentModel = model;
+ currentProvider = provider;
}
+
+ return { ...message, content };
}
- return message;
- });
- const provider = MODEL_LIST.find((model) => model.name === currentModel)?.provider || DEFAULT_PROVIDER;
+ return message; // No changes for non-user messages
+ });
return _streamText({
- model: getModel(provider, currentModel, env),
+ model: getModel(currentProvider, currentModel, env),
system: getSystemPrompt(),
maxTokens: MAX_TOKENS,
- // headers: {
- // 'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
- // },
messages: convertToCoreMessages(processedMessages),
...options,
});