Skip to content

Commit

Permalink
Added Nvidia Models
Browse files Browse the repository at this point in the history
  • Loading branch information
hitendraa committed Nov 12, 2024
1 parent 1630d80 commit bed1e96
Show file tree
Hide file tree
Showing 4 changed files with 24 additions and 0 deletions.
5 changes: 5 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -54,5 +54,10 @@ LMSTUDIO_API_BASE_URL=
# You only need this environment variable set if you want to use xAI models
XAI_API_KEY=

# Get your Nvidia API key
# https://build.nvidia.com/
# You only need this environment variable set if you want to use Nvidia models
NVIDIA_API_KEY=

# Include this environment variable if you want more logging for debugging locally
VITE_LOG_LEVEL=debug
4 changes: 4 additions & 0 deletions app/lib/.server/llm/api-key.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,17 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
case "xAI":
return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY;
case 'Nvidia':
return env.NVIDIA_API_KEY || cloudflareEnv.NVIDIA_API_KEY;
default:
return "";
}
}

export function getBaseURL(cloudflareEnv: Env, provider: string) {
switch (provider) {
case 'Nvidia':
return 'https://integrate.api.nvidia.com/v1';
case 'OpenAILike':
return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
case 'LMStudio':
Expand Down
12 changes: 12 additions & 0 deletions app/lib/.server/llm/model.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,16 @@ import { createOpenRouter } from "@openrouter/ai-sdk-provider";
import { mistral } from '@ai-sdk/mistral';
import { createMistral } from '@ai-sdk/mistral';


/**
 * Builds a language-model handle for NVIDIA's OpenAI-compatible endpoint.
 *
 * NVIDIA NIM exposes an OpenAI-compatible REST API, so we reuse the
 * @ai-sdk/openai provider and only point it at a different base URL.
 *
 * @param apiKey - NVIDIA API key (see https://build.nvidia.com/).
 * @param model - Model identifier, e.g. 'nvidia/llama-3.1-nemotron-70b-instruct'.
 * @param baseURL - Endpoint root; defaults to NVIDIA's hosted API. Override
 *   for self-hosted NIM deployments. Keep in sync with getBaseURL('Nvidia').
 * @returns A provider model instance usable by the AI SDK.
 */
export function getNvidiaModel(
  apiKey: string,
  model: string,
  baseURL = 'https://integrate.api.nvidia.com/v1',
) {
  // OpenAI-compatible client configured for the NVIDIA endpoint.
  const nvidia = createOpenAI({
    baseURL,
    apiKey,
  });

  return nvidia(model);
}

export function getAnthropicModel(apiKey: string, model: string) {
const anthropic = createAnthropic({
apiKey,
Expand Down Expand Up @@ -105,6 +115,8 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
const baseURL = getBaseURL(env, provider);

switch (provider) {
case 'Nvidia':
return getNvidiaModel(apiKey, model);
case 'Anthropic':
return getAnthropicModel(apiKey, model);
case 'OpenAI':
Expand Down
3 changes: 3 additions & 0 deletions app/utils/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,9 @@ const staticModels: ModelInfo[] = [
{ name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
{ name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
{ name: 'grok-beta', label: "xAI Grok Beta", provider: 'xAI' },
{ name: 'nvidia/llama-3.1-nemotron-70b-instruct', label: 'Llama 3.1 Nemotron 70B Instruct', provider: 'Nvidia' },
{ name: 'meta/llama-3.1-405b-instruct', label: 'Llama 3.1 405B Instruct', provider: 'Nvidia'},
{ name: 'meta/codellama-70b', label: 'Codellama 70B', provider: 'Nvidia'},
{ name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek'},
{ name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek'},
{ name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' },
Expand Down

0 comments on commit bed1e96

Please sign in to comment.