Merge pull request #188 from TommyHolmberg/respect-provider-choice
fix: respect provider choice from UI
coleam00 authored Nov 9, 2024
2 parents d18517a + f30612d commit 936a9c0
Showing 5 changed files with 61 additions and 45 deletions.
8 changes: 7 additions & 1 deletion app/components/chat/BaseChat.tsx
@@ -26,7 +26,7 @@ const EXAMPLE_PROMPTS = [

const providerList = [...new Set(MODEL_LIST.map((model) => model.provider))]

const ModelSelector = ({ model, setModel, modelList, providerList, provider, setProvider }) => {
const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList }) => {
return (
<div className="mb-2 flex gap-2">
<select
@@ -80,6 +80,8 @@ interface BaseChatProps {
input?: string;
model: string;
setModel: (model: string) => void;
provider: string;
setProvider: (provider: string) => void;
handleStop?: () => void;
sendMessage?: (event: React.UIEvent, messageInput?: string) => void;
handleInputChange?: (event: React.ChangeEvent<HTMLTextAreaElement>) => void;
@@ -101,6 +103,8 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
input = '',
model,
setModel,
provider,
setProvider,
sendMessage,
handleInputChange,
enhancePrompt,
@@ -193,6 +197,8 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
model={model}
setModel={setModel}
modelList={MODEL_LIST}
provider={provider}
setProvider={setProvider}
providerList={providerList}
provider={provider}
setProvider={setProvider}
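For context, the props added above let the provider selection flow into ModelSelector alongside the model selection. A minimal sketch of how such a selector could consume them (prop names follow the diff; the markup and the provider-based filtering are assumptions, not the repository's actual JSX):

```tsx
// Sketch only: a provider-aware selector using the props from the diff.
// The markup and the provider-based filtering below are assumptions.
interface ModelSelectorSketchProps {
  model: string;
  setModel: (model: string) => void;
  provider: string;
  setProvider: (provider: string) => void;
  modelList: { name: string; label: string; provider: string }[];
  providerList: string[];
}

export function ModelSelectorSketch({
  model,
  setModel,
  provider,
  setProvider,
  modelList,
  providerList,
}: ModelSelectorSketchProps) {
  return (
    <div className="mb-2 flex gap-2">
      {/* Choosing a provider narrows the model dropdown to that provider's models. */}
      <select value={provider} onChange={(e) => setProvider(e.target.value)}>
        {providerList.map((p) => (
          <option key={p} value={p}>
            {p}
          </option>
        ))}
      </select>
      <select value={model} onChange={(e) => setModel(e.target.value)}>
        {modelList
          .filter((m) => m.provider === provider)
          .map((m) => (
            <option key={m.name} value={m.name}>
              {m.label}
            </option>
          ))}
      </select>
    </div>
  );
}
```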
9 changes: 6 additions & 3 deletions app/components/chat/Chat.client.tsx
@@ -11,7 +11,7 @@ import { useChatHistory } from '~/lib/persistence';
import { chatStore } from '~/lib/stores/chat';
import { workbenchStore } from '~/lib/stores/workbench';
import { fileModificationsToHTML } from '~/utils/diff';
import { DEFAULT_MODEL } from '~/utils/constants';
import { DEFAULT_MODEL, DEFAULT_PROVIDER } from '~/utils/constants';
import { cubicEasingFn } from '~/utils/easings';
import { createScopedLogger, renderLogger } from '~/utils/logger';
import { BaseChat } from './BaseChat';
@@ -75,6 +75,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp

const [chatStarted, setChatStarted] = useState(initialMessages.length > 0);
const [model, setModel] = useState(DEFAULT_MODEL);
const [provider, setProvider] = useState(DEFAULT_PROVIDER);

const { showChat } = useStore(chatStore);

@@ -188,15 +189,15 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
* manually reset the input and we'd have to manually pass in file attachments. However, those
* aren't relevant here.
*/
append({ role: 'user', content: `[Model: ${model}]\n\n${diff}\n\n${_input}` });
append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider}]\n\n${diff}\n\n${_input}` });

/**
* After sending a new message we reset all modifications since the model
* should now be aware of all the changes.
*/
workbenchStore.resetAllFileModifications();
} else {
append({ role: 'user', content: `[Model: ${model}]\n\n${_input}` });
append({ role: 'user', content: `[Model: ${model}]\n\n[Provider: ${provider}]\n\n${_input}` });
}

setInput('');
@@ -228,6 +229,8 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
sendMessage={sendMessage}
model={model}
setModel={setModel}
provider={provider}
setProvider={setProvider}
messageRef={messageRef}
scrollRef={scrollRef}
handleInputChange={handleInputChange}
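With the new provider state in place, every user message is prefixed with both annotations before it reaches the server, which strips them back out in stream-text.ts. A minimal sketch of the resulting content string (values are illustrative):

```ts
// Example of the annotated content built by sendMessage (values are illustrative).
const model = 'claude-3-5-sonnet-20240620';
const provider = 'Anthropic';
const _input = 'Build me a todo app';

const content = `[Model: ${model}]\n\n[Provider: ${provider}]\n\n${_input}`;
// => "[Model: claude-3-5-sonnet-20240620]\n\n[Provider: Anthropic]\n\nBuild me a todo app"
// This is the string handed to append({ role: 'user', content }).
```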
4 changes: 2 additions & 2 deletions app/components/chat/UserMessage.tsx
@@ -1,7 +1,7 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
import { modificationsRegex } from '~/utils/diff';
import { MODEL_REGEX } from '~/utils/constants';
import { MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
import { Markdown } from './Markdown';

interface UserMessageProps {
@@ -17,5 +17,5 @@ export function UserMessage({ content }: UserMessageProps) {
}

function sanitizeUserMessage(content: string) {
return content.replace(modificationsRegex, '').replace(MODEL_REGEX, '').trim();
return content.replace(modificationsRegex, '').replace(MODEL_REGEX, 'Using: $1').replace(PROVIDER_REGEX, ' ($1)\n\n').trim();
}
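The replacement strings above turn the hidden annotations into a visible "Using: model (provider)" prefix when the message is rendered. A quick sketch of the transformation, using the regexes from app/utils/constants.ts (modificationsRegex omitted for brevity):

```ts
// Sketch of what sanitizeUserMessage produces for an annotated message.
const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;

const raw = '[Model: claude-3-5-sonnet-20240620]\n\n[Provider: Anthropic]\n\nBuild me a todo app';

const display = raw
  .replace(MODEL_REGEX, 'Using: $1')
  .replace(PROVIDER_REGEX, ' ($1)\n\n')
  .trim();

// display === 'Using: claude-3-5-sonnet-20240620 (Anthropic)\n\nBuild me a todo app'
```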
48 changes: 27 additions & 21 deletions app/lib/.server/llm/stream-text.ts
@@ -4,7 +4,7 @@ import { streamText as _streamText, convertToCoreMessages } from 'ai';
import { getModel } from '~/lib/.server/llm/model';
import { MAX_TOKENS } from './constants';
import { getSystemPrompt } from './prompts';
import { MODEL_LIST, DEFAULT_MODEL, DEFAULT_PROVIDER } from '~/utils/constants';
import { MODEL_LIST, DEFAULT_MODEL, DEFAULT_PROVIDER, MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';

interface ToolResult<Name extends string, Args, Result> {
toolCallId: string;
@@ -24,18 +24,22 @@ export type Messages = Message[];

export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;

function extractModelFromMessage(message: Message): { model: string; content: string } {
const modelRegex = /^\[Model: (.*?)\]\n\n/;
const match = message.content.match(modelRegex);
function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
// Extract model
const modelMatch = message.content.match(MODEL_REGEX);
const model = modelMatch ? modelMatch[1] : DEFAULT_MODEL;

if (match) {
const model = match[1];
const content = message.content.replace(modelRegex, '');
return { model, content };
}
// Extract provider
const providerMatch = message.content.match(PROVIDER_REGEX);
const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER;

// Default model if not specified
return { model: DEFAULT_MODEL, content: message.content };
// Remove model and provider lines from content
const cleanedContent = message.content
.replace(MODEL_REGEX, '')
.replace(PROVIDER_REGEX, '')
.trim();

return { model, provider, content: cleanedContent };
}

export function streamText(
@@ -45,26 +49,28 @@ export function streamText(
apiKeys?: Record<string, string>
) {
let currentModel = DEFAULT_MODEL;
let currentProvider = DEFAULT_PROVIDER;

const processedMessages = messages.map((message) => {
if (message.role === 'user') {
const { model, content } = extractModelFromMessage(message);
if (model && MODEL_LIST.find((m) => m.name === model)) {
currentModel = model; // Update the current model
const { model, provider, content } = extractPropertiesFromMessage(message);

if (MODEL_LIST.find((m) => m.name === model)) {
currentModel = model;
}

currentProvider = provider;

return { ...message, content };
}
return message;
});

const provider = MODEL_LIST.find((model) => model.name === currentModel)?.provider || DEFAULT_PROVIDER;
return message; // No changes for non-user messages
});

return _streamText({
model: getModel(provider, currentModel, env, apiKeys),
model: getModel(currentProvider, currentModel, env, apiKeys),
system: getSystemPrompt(),
maxTokens: MAX_TOKENS,
// headers: {
// 'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
// },
messages: convertToCoreMessages(processedMessages),
...options,
});
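The substantive change here is that the provider now comes straight from the message annotation (currentProvider) rather than being re-derived from the model name via MODEL_LIST, so the UI's provider choice is what reaches getModel. A self-contained sketch of the extraction step, with the regexes and defaults copied from the diff (the helper name and example values are local to this sketch):

```ts
// Standalone sketch mirroring extractPropertiesFromMessage from the diff.
const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;
const DEFAULT_MODEL = 'claude-3-5-sonnet-20240620';
const DEFAULT_PROVIDER = 'Anthropic';

function extractProperties(content: string): { model: string; provider: string; content: string } {
  const modelMatch = content.match(MODEL_REGEX);
  const providerMatch = content.match(PROVIDER_REGEX);

  return {
    model: modelMatch ? modelMatch[1] : DEFAULT_MODEL,
    provider: providerMatch ? providerMatch[1] : DEFAULT_PROVIDER,
    // Strip both annotations so the LLM only sees the user's actual prompt.
    content: content.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '').trim(),
  };
}

// Example: the provider is taken from the UI annotation instead of being inferred from MODEL_LIST.
const parsed = extractProperties('[Model: llama-3.1-70b-versatile]\n\n[Provider: Groq]\n\nHello');
// parsed => { model: 'llama-3.1-70b-versatile', provider: 'Groq', content: 'Hello' }
```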
37 changes: 19 additions & 18 deletions app/utils/constants.ts
@@ -4,6 +4,7 @@ export const WORK_DIR_NAME = 'project';
export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';
export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
export const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;
export const DEFAULT_MODEL = 'claude-3-5-sonnet-20240620';
export const DEFAULT_PROVIDER = 'Anthropic';

@@ -20,7 +21,7 @@ const staticModels: ModelInfo[] = [
{ name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter' },
{ name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter' },
{ name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google' },
{ name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google'},
{ name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google' },
{ name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq' },
{ name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq' },
{ name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq' },
@@ -56,11 +57,11 @@ const getOllamaBaseUrl = () => {
// Frontend always uses localhost
return defaultBaseUrl;
}

// Backend: Check if we're running in Docker
const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
return isDocker

return isDocker
? defaultBaseUrl.replace("localhost", "host.docker.internal")
: defaultBaseUrl;
};
@@ -82,32 +83,32 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
}

async function getOpenAILikeModels(): Promise<ModelInfo[]> {
try {
const base_url =import.meta.env.OPENAI_LIKE_API_BASE_URL || "";
if (!base_url) {
try {
const base_url = import.meta.env.OPENAI_LIKE_API_BASE_URL || "";
if (!base_url) {
return [];
}
const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? "";
const response = await fetch(`${base_url}/models`, {
headers: {
Authorization: `Bearer ${api_key}`,
}
});
}
const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? "";
const response = await fetch(`${base_url}/models`, {
headers: {
Authorization: `Bearer ${api_key}`,
}
});
const res = await response.json() as any;
return res.data.map((model: any) => ({
name: model.id,
label: model.id,
provider: 'OpenAILike',
}));
}catch (e) {
return []
}
} catch (e) {
return []
}

}
async function initializeModelList(): Promise<void> {
const ollamaModels = await getOllamaModels();
const openAiLikeModels = await getOpenAILikeModels();
MODEL_LIST = [...ollamaModels,...openAiLikeModels, ...staticModels];
MODEL_LIST = [...ollamaModels, ...openAiLikeModels, ...staticModels];
}
initializeModelList().then();
export { getOllamaModels, getOpenAILikeModels, initializeModelList };
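One detail worth noting in the new constants: MODEL_REGEX is anchored to the start of the message while PROVIDER_REGEX is not, since the provider tag follows the model tag in the annotated content. A quick check against a message shaped like the one built in Chat.client.tsx (illustrative values):

```ts
// Quick check of both patterns against an annotated message (illustrative values).
const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;

const message = '[Model: gemini-1.5-pro-latest]\n\n[Provider: Google]\n\nSummarize this file';

console.log(message.match(MODEL_REGEX)?.[1]);    // 'gemini-1.5-pro-latest'
console.log(message.match(PROVIDER_REGEX)?.[1]); // 'Google'
```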
