[feat][WIP]: OpenAI Like and Ollama UI change #482

Closed

Changes from all commits

109 changes: 81 additions & 28 deletions app/components/chat/BaseChat.tsx

```diff
@@ -25,38 +25,91 @@ import { ExamplePrompts } from '~/components/chat/ExamplePrompts';
 // @ts-ignore TODO: Introduce proper types
 // eslint-disable-next-line @typescript-eslint/no-unused-vars
 const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList, apiKeys }) => {
+  const [customUrl, setCustomUrl] = useState('');
+  const [customModel, setCustomModel] = useState('');
+
+  useEffect(() => {
+    if (provider?.name === 'OpenAILike') {
+      setCustomUrl(import.meta.env.OPENAI_LIKE_API_BASE_URL || '');
+      setCustomModel(model || '');
+    } else if (provider?.name === 'Ollama') {
+      setCustomUrl(import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434');
+    }
+  }, [provider?.name]);
+
   return (
-    <div className="mb-2 flex gap-2 flex-col sm:flex-row">
-      <select
-        value={provider?.name}
-        onChange={(e) => {
-          setProvider(providerList.find((p: ProviderInfo) => p.name === e.target.value));
+    <div className="mb-2 flex gap-2 flex-col">
+      <div className="flex gap-2 flex-col sm:flex-row">
+        <select
+          value={provider?.name}
+          onChange={(e) => {
+            setProvider(providerList.find((p: ProviderInfo) => p.name === e.target.value));
 
-          const firstModel = [...modelList].find((m) => m.provider == e.target.value);
-          setModel(firstModel ? firstModel.name : '');
-        }}
-        className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
-      >
-        {providerList.map((provider: ProviderInfo) => (
-          <option key={provider.name} value={provider.name}>
-            {provider.name}
-          </option>
-        ))}
-      </select>
-      <select
-        key={provider?.name}
-        value={model}
-        onChange={(e) => setModel(e.target.value)}
-        className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all lg:max-w-[70%] "
-      >
-        {[...modelList]
-          .filter((e) => e.provider == provider?.name && e.name)
-          .map((modelOption) => (
-            <option key={modelOption.name} value={modelOption.name}>
-              {modelOption.label}
+            const firstModel = [...modelList].find((m) => m.provider == e.target.value);
+            setModel(firstModel ? firstModel.name : '');
+          }}
+          className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
+        >
+          {providerList.map((provider: ProviderInfo) => (
+            <option key={provider.name} value={provider.name}>
+              {provider.name}
             </option>
           ))}
-      </select>
+        </select>
+        {provider?.name !== 'OpenAILike' && (
+          <select
+            key={provider?.name}
+            value={model}
+            onChange={(e) => setModel(e.target.value)}
+            className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all lg:max-w-[70%]"
+          >
+            {[...modelList]
+              .filter((e) => e.provider == provider?.name && e.name)
+              .map((modelOption) => (
+                <option key={modelOption.name} value={modelOption.name}>
+                  {modelOption.label}
+                </option>
+              ))}
+          </select>
+        )}
+      </div>
+      {(provider?.name === 'OpenAILike' || provider?.name === 'Ollama') && (
+        <div className="flex gap-2 flex-col sm:flex-row">
+          <input
+            type="text"
+            placeholder={
+              provider?.name === 'Ollama'
+                ? 'Ollama API URL (default: http://localhost:11434)'
+                : 'Enter API Base URL (e.g., http://localhost:1234/v1)'
+            }
+            value={customUrl}
+            onChange={(e) => {
+              setCustomUrl(e.target.value);
+
+              if (typeof window !== 'undefined') {
+                if (provider?.name === 'OpenAILike') {
+                  window.localStorage.setItem('OPENAI_LIKE_API_BASE_URL', e.target.value);
+                } else if (provider?.name === 'Ollama') {
+                  window.localStorage.setItem('OLLAMA_API_BASE_URL', e.target.value || 'http://localhost:11434');
+                }
+              }
+            }}
+            className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
+          />
+          {provider?.name === 'OpenAILike' && (
+            <input
+              type="text"
+              placeholder="Enter Model Name"
+              value={customModel}
+              onChange={(e) => {
+                setCustomModel(e.target.value);
+                setModel(e.target.value);
+              }}
+              className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
+            />
+          )}
+        </div>
+      )}
     </div>
   );
 };
```
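
For reference, a minimal sketch (not part of the PR diff) of the persistence behavior the updated ModelSelector introduces in the browser, using only the localStorage keys that appear in the diff above:

```ts
// Illustrative sketch only; it mirrors what the new inputs do on change.
if (typeof window !== 'undefined') {
  // OpenAI-Like base URL, written by the custom URL input when the provider is OpenAILike:
  window.localStorage.setItem('OPENAI_LIKE_API_BASE_URL', 'http://localhost:1234/v1');

  // Ollama base URL, written when the provider is Ollama (falls back to the default when cleared):
  window.localStorage.setItem('OLLAMA_API_BASE_URL', 'http://localhost:11434');

  // getOllamaBaseUrl() in app/utils/constants.ts reads this key back on the client:
  console.log(window.localStorage.getItem('OLLAMA_API_BASE_URL')); // "http://localhost:11434"
}
```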
8 changes: 6 additions & 2 deletions app/lib/.server/llm/model.ts

```diff
@@ -24,9 +24,13 @@ export function getAnthropicModel(apiKey: OptionalApiKey, model: string) {
   return anthropic(model);
 }
 export function getOpenAILikeModel(baseURL: string, apiKey: OptionalApiKey, model: string) {
+  if (!baseURL) {
+    throw new Error('OpenAI Like API Base URL is required');
+  }
+
   const openai = createOpenAI({
-    baseURL,
-    apiKey,
+    baseURL: baseURL.endsWith('/v1') ? baseURL : `${baseURL}/v1`,
+    apiKey: apiKey || 'not-needed',
   });
 
   return openai(model);
```
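
A small standalone sketch (not part of the PR) of the base-URL handling getOpenAILikeModel now performs: the `/v1` suffix is appended when missing, and `apiKey || 'not-needed'` supplies a placeholder key, presumably for local servers that do not require authentication. `normalizeBaseUrl` below is a hypothetical name used only for illustration:

```ts
// Hypothetical helper illustrating the suffix logic in getOpenAILikeModel above.
const normalizeBaseUrl = (baseURL: string): string =>
  baseURL.endsWith('/v1') ? baseURL : `${baseURL}/v1`;

console.log(normalizeBaseUrl('http://localhost:1234'));    // "http://localhost:1234/v1"
console.log(normalizeBaseUrl('http://localhost:1234/v1')); // "http://localhost:1234/v1"
```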
2 changes: 1 addition & 1 deletion app/lib/runtime/action-runner.ts

```diff
@@ -81,6 +81,7 @@ export class ActionRunner {
 
     if (!action) {
       unreachable(`Action ${actionId} not found`);
+      return;
     }
 
     if (action.executed) {
@@ -100,7 +101,6 @@
       .catch((error) => {
        console.error('Action failed:', error);
      });
-    return this.#currentExecutionPromise;
  }
 
  async #executeAction(actionId: string, isStreaming: boolean = false) {
```
1 change: 1 addition & 0 deletions app/types/model.ts

```diff
@@ -7,4 +7,5 @@ export type ProviderInfo = {
   getApiKeyLink?: string;
   labelForGetApiKey?: string;
   icon?: string;
+  description?: string;
 };
```
68 changes: 45 additions & 23 deletions app/utils/constants.ts

```diff
@@ -48,6 +48,8 @@ const PROVIDER_LIST: ProviderInfo[] = [
     name: 'OpenAILike',
     staticModels: [],
     getDynamicModels: getOpenAILikeModels,
+    getApiKeyLink: '',
+    labelForGetApiKey: 'API Key (Optional)',
   },
   {
     name: 'Cohere',
@@ -268,25 +270,24 @@ const staticModels: ModelInfo[] = PROVIDER_LIST.map((p) => p.staticModels).flat(
 export let MODEL_LIST: ModelInfo[] = [...staticModels];
 
 const getOllamaBaseUrl = () => {
-  const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
-
   // Check if we're in the browser
   if (typeof window !== 'undefined') {
-    // Frontend always uses localhost
-    return defaultBaseUrl;
+    // Try to get URL from localStorage first, then env, then default
+    return (
+      window.localStorage.getItem('OLLAMA_API_BASE_URL') ||
+      import.meta.env.OLLAMA_API_BASE_URL ||
+      'http://localhost:11434'
+    );
   }
 
   // Backend: Check if we're running in Docker
+  const defaultBaseUrl = process.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
   const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
 
   return isDocker ? defaultBaseUrl.replace('localhost', 'host.docker.internal') : defaultBaseUrl;
 };
 
 async function getOllamaModels(): Promise<ModelInfo[]> {
-  //if (typeof window === 'undefined') {
-  //return [];
-  //}
-
   try {
     const baseUrl = getOllamaBaseUrl();
     const response = await fetch(`${baseUrl}/api/tags`);
@@ -298,35 +299,56 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
       provider: 'Ollama',
       maxTokenAllowed: 8000,
     }));
-  } catch (e) {
-    console.error('Error getting Ollama models:', e);
+  } catch (error) {
+    console.error('Error getting Ollama models:', error);
     return [];
   }
 }
 
 async function getOpenAILikeModels(): Promise<ModelInfo[]> {
   try {
     const baseUrl = import.meta.env.OPENAI_LIKE_API_BASE_URL || '';
+    const customModel = typeof window !== 'undefined' ? window.localStorage.getItem('OPENAI_LIKE_MODEL') : '';
 
     if (!baseUrl) {
       return [];
     }
 
     const apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
-    const response = await fetch(`${baseUrl}/models`, {
-      headers: {
-        Authorization: `Bearer ${apiKey}`,
-      },
-    });
-    const res = (await response.json()) as any;
-
-    return res.data.map((model: any) => ({
-      name: model.id,
-      label: model.id,
-      provider: 'OpenAILike',
-    }));
-  } catch (e) {
-    console.error('Error getting OpenAILike models:', e);
+
+    try {
+      const response = await fetch(`${baseUrl}/models`, {
+        headers: {
+          Authorization: `Bearer ${apiKey}`,
+        },
+      });
+      const res = (await response.json()) as any;
+
+      return res.data.map((model: any) => ({
+        name: model.id,
+        label: model.id,
+        provider: 'OpenAILike',
+        maxTokenAllowed: 8000,
+      }));
+    } catch (err) {
+      console.error('Error getting OpenAILike models:', err);
+
+      // If we can't fetch models, return a default model if one is set
+      if (customModel) {
+        return [
+          {
+            name: customModel,
+            label: customModel,
+            provider: 'OpenAILike',
+            maxTokenAllowed: 8000,
+          },
+        ];
+      }
+
+      return [];
+    }
+  } catch (err) {
+    console.error('Error getting OpenAILike models:', err);
     return [];
   }
 }
```
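
Taken together, the constants.ts changes give Ollama a three-step base-URL resolution in the browser (localStorage, then the environment variable, then the default) and keep the Docker hostname rewrite on the server. A self-contained sketch of that order (not part of the PR; `resolveOllamaBaseUrl` and its options object are hypothetical names used only to make the inputs explicit):

```ts
// Sketch that mirrors the getOllamaBaseUrl logic above with explicit inputs.
function resolveOllamaBaseUrl(opts: {
  isBrowser: boolean;
  localStorageUrl?: string | null; // window.localStorage.getItem('OLLAMA_API_BASE_URL')
  envUrl?: string;                 // OLLAMA_API_BASE_URL
  runningInDocker?: boolean;       // RUNNING_IN_DOCKER === 'true'
}): string {
  const fallback = 'http://localhost:11434';

  if (opts.isBrowser) {
    // Browser: localStorage -> env -> default
    return opts.localStorageUrl || opts.envUrl || fallback;
  }

  // Server: env -> default, rewriting localhost for Docker
  const base = opts.envUrl || fallback;
  return opts.runningInDocker ? base.replace('localhost', 'host.docker.internal') : base;
}

console.log(resolveOllamaBaseUrl({ isBrowser: true, localStorageUrl: 'http://192.168.1.10:11434' }));
// "http://192.168.1.10:11434"
console.log(resolveOllamaBaseUrl({ isBrowser: false, runningInDocker: true }));
// "http://host.docker.internal:11434"
```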
2 changes: 1 addition & 1 deletion app/utils/logger.ts

```diff
@@ -11,7 +11,7 @@ interface Logger {
   setLevel: (level: DebugLevel) => void;
 }
 
-let currentLevel: DebugLevel = import.meta.env.VITE_LOG_LEVEL ?? import.meta.env.DEV ? 'debug' : 'info';
+let currentLevel: DebugLevel = (import.meta.env.VITE_LOG_LEVEL ?? import.meta.env.DEV) ? 'debug' : 'info';
 
 const isWorker = 'HTMLRewriter' in globalThis;
 const supportsColor = !isWorker;
```
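
The logger change only adds parentheses: `??` already binds more tightly than the conditional operator, so the grouping is identical before and after, and the parentheses simply make the intended precedence explicit. A quick standalone check (illustrative only, not part of the PR):

```ts
// Both expressions parse as (level ?? dev) ? 'debug' : 'info'.
const level: string | undefined = undefined;
const dev = true;

const implicit = level ?? dev ? 'debug' : 'info';
const explicit = (level ?? dev) ? 'debug' : 'info';

console.log(implicit, explicit, implicit === explicit); // "debug" "debug" true
```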