diff --git a/.env.example b/.env.example
index 55698926a..46a21e892 100644
--- a/.env.example
+++ b/.env.example
@@ -43,10 +43,16 @@ OPENAI_LIKE_API_KEY=
 
 # You only need this environment variable set if you want to use Mistral models
 MISTRAL_API_KEY=
+
 # Get LMStudio Base URL from LM Studio Developer Console
 # Make sure to enable CORS
 # Example: http://localhost:1234
 LMSTUDIO_API_BASE_URL=
 
+# Get your xAI API key
+# https://x.ai/api
+# You only need this environment variable set if you want to use xAI models
+XAI_API_KEY=
+
 # Include this environment variable if you want more logging for debugging locally
 VITE_LOG_LEVEL=debug
diff --git a/app/lib/.server/llm/api-key.ts b/app/lib/.server/llm/api-key.ts
index 1a641a395..7d12666d8 100644
--- a/app/lib/.server/llm/api-key.ts
+++ b/app/lib/.server/llm/api-key.ts
@@ -25,6 +25,8 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
       return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
     case "OpenAILike":
       return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
+    case "xAI":
+      return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY;
     default:
       return "";
   }
diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts
index 6e73ecda8..ceed694f3 100644
--- a/app/lib/.server/llm/model.ts
+++ b/app/lib/.server/llm/model.ts
@@ -58,7 +58,10 @@ export function getGroqModel(apiKey: string, model: string) {
 }
 
 export function getOllamaModel(baseURL: string, model: string) {
-  let Ollama = ollama(model);
+  let Ollama = ollama(model, {
+    numCtx: 32768,
+  });
+
   Ollama.config.baseURL = `${baseURL}/api`;
   return Ollama;
 }
@@ -88,6 +91,15 @@ export function getLMStudioModel(baseURL: string, model: string) {
   return lmstudio(model);
 }
 
+export function getXAIModel(apiKey: string, model: string) {
+  const openai = createOpenAI({
+    baseURL: 'https://api.x.ai/v1',
+    apiKey,
+  });
+
+  return openai(model);
+}
+
 export function getModel(provider: string, model: string, env: Env) {
   const apiKey = getAPIKey(env, provider);
   const baseURL = getBaseURL(env, provider);
@@ -111,6 +123,8 @@ export function getModel(provider: string, model: string, env: Env) {
       return getMistralModel(apiKey, model);
     case 'LMStudio':
       return getLMStudioModel(baseURL, model);
+    case 'xAI':
+      return getXAIModel(apiKey, model);
     default:
       return getOllamaModel(baseURL, model);
   }
diff --git a/app/utils/constants.ts b/app/utils/constants.ts
index 409c5557c..8189890f9 100644
--- a/app/utils/constants.ts
+++ b/app/utils/constants.ts
@@ -15,6 +15,7 @@ const staticModels: ModelInfo[] = [
   { name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter' },
   { name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter' },
   { name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter' },
+  { name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter' },
   { name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter' },
   { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter' },
   { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter' },
@@ -32,6 +33,7 @@ const staticModels: ModelInfo[] = [
   { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
   { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
   { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
+  { name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI' },
   { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek'},
   { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek'},
   { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' },