feat: added AzureOpenAI #98

Open · wants to merge 1 commit into base: main
6 changes: 6 additions & 0 deletions .env.example
@@ -10,6 +10,12 @@ GROQ_API_KEY=
# You only need this environment variable set if you want to use GPT models
OPENAI_API_KEY=

# Get your Azure OpenAI API key and base URL from your Azure OpenAI resource -
# https://{AZURE_OPENAI_API_BASE_URL}/openai/models?api-version=2024-06-01
# You only need these environment variables set if you want to use GPT models via Azure OpenAI
AZURE_OPENAI_API_BASE_URL=
AZURE_OPENAI_API_KEY=

# Get your Anthropic API Key in your account settings -
# https://console.anthropic.com/settings/keys
# You only need this environment variable set if you want to use Claude models
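For context on the two new variables in .env.example: a minimal TypeScript sketch of listing the models behind an Azure OpenAI endpoint, using the same URL pattern and `api-key` header that this PR relies on. The base URL value is hypothetical and assumes the usual `https://<resource-name>.openai.azure.com` form; Node 18+ global `fetch` is assumed.

```ts
// Sketch only: both values are read from the new variables added in .env.example.
const baseUrl = process.env.AZURE_OPENAI_API_BASE_URL ?? ''; // e.g. https://my-resource.openai.azure.com (hypothetical)
const apiKey = process.env.AZURE_OPENAI_API_KEY ?? '';

async function listAzureOpenAIModels(): Promise<unknown> {
  // Same endpoint and api-version that the comment block above points at.
  const res = await fetch(`${baseUrl}/openai/models?api-version=2024-06-01`, {
    headers: { 'api-key': apiKey },
  });
  return res.json();
}

listAzureOpenAIModels().then((models) => console.log(models));
```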
3 changes: 3 additions & 0 deletions app/components/chat/BaseChat.tsx
@@ -48,6 +48,9 @@ const ModelSelector = ({ model, setModel, modelList, providerList }) => {
<option key="OpenAILike" value="OpenAILike">
OpenAILike
</option>
<option key="AzureOpenAI" value="AzureOpenAI">
AzureOpenAI
</option>
</select>
<select
value={model}
4 changes: 4 additions & 0 deletions app/lib/.server/llm/api-key.ts
@@ -13,6 +13,8 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
return env.ANTHROPIC_API_KEY || cloudflareEnv.ANTHROPIC_API_KEY;
case 'OpenAI':
return env.OPENAI_API_KEY || cloudflareEnv.OPENAI_API_KEY;
case 'AzureOpenAI':
return env.AZURE_OPENAI_API_KEY || cloudflareEnv.AZURE_OPENAI_API_KEY;
case 'Google':
return env.GOOGLE_GENERATIVE_AI_API_KEY || cloudflareEnv.GOOGLE_GENERATIVE_AI_API_KEY;
case 'Groq':
@@ -34,6 +36,8 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
switch (provider) {
case 'OpenAILike':
return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
case 'AzureOpenAI':
return env.AZURE_OPENAI_API_BASE_URL || cloudflareEnv.AZURE_OPENAI_API_BASE_URL;
case 'Ollama':
return env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
default:
4 changes: 3 additions & 1 deletion app/lib/.server/llm/constants.ts
@@ -1,5 +1,7 @@
// see https://docs.anthropic.com/en/docs/about-claude/models
export const MAX_TOKENS = 8000;
// export const MAX_TOKENS = 8000;
// Change MAX_TOKENS to 4000 for GPT-4o
export const MAX_TOKENS = 4000;

// limits the number of model responses that can be returned in a single request
export const MAX_RESPONSE_SEGMENTS = 2;
26 changes: 25 additions & 1 deletion app/lib/.server/llm/model.ts
@@ -1,13 +1,14 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
import { getAPIKey, getBaseURL, getResourceName } from '~/lib/.server/llm/api-key';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { ollama } from 'ollama-ai-provider';
import { createOpenRouter } from "@openrouter/ai-sdk-provider";
import { mistral } from '@ai-sdk/mistral';
import { createMistral } from '@ai-sdk/mistral';
import { createAzure } from '@ai-sdk/azure';

export function getAnthropicModel(apiKey: string, model: string) {
const anthropic = createAnthropic({
@@ -32,6 +33,27 @@ export function getOpenAIModel(apiKey: string, model: string) {
return openai(model);
}

export function getAzureOpenAIModel(baseURL: string, apiKey: string, model: string) {
function extractResourceName(url) {
const regex = /^https:\/\/([^\.]+)\./;
const match = url.match(regex);
if (match && match[1]) {
return match[1];
} else {
return null; // or throw an error, depending on how you want to handle this case
}
}

const resourceName = extractResourceName(baseURL);

const azureOpenAI = createAzure({
apiKey,
resourceName: resourceName,
});

return azureOpenAI(model);
}

export function getMistralModel(apiKey: string, model: string) {
const mistral = createMistral({
apiKey
@@ -89,6 +111,8 @@ export function getModel(provider: string, model: string, env: Env) {
return getAnthropicModel(apiKey, model);
case 'OpenAI':
return getOpenAIModel(apiKey, model);
case 'AzureOpenAI':
return getAzureOpenAIModel(baseURL, apiKey, model);
case 'Groq':
return getGroqModel(apiKey, model);
case 'OpenRouter':
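The resource name that `createAzure` needs is derived in `getAzureOpenAIModel` by taking the first hostname label of the base URL, which for a standard Azure OpenAI endpoint is the resource name. A small illustration of the same regex, with a hypothetical endpoint value:

```ts
// Mirrors the extraction used inside getAzureOpenAIModel above.
function extractResourceName(url: string): string | null {
  const match = url.match(/^https:\/\/([^.]+)\./);
  return match ? match[1] : null;
}

// For a typical endpoint the first hostname label is the Azure resource name.
console.log(extractResourceName('https://my-resource.openai.azure.com')); // "my-resource"
console.log(extractResourceName('not-a-url'));                            // null
```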
32 changes: 30 additions & 2 deletions app/utils/constants.ts
@@ -86,10 +86,38 @@ async function getOpenAILikeModels(): Promise<ModelInfo[]> {
}

}

async function getAzureOpenAIModels(): Promise<ModelInfo[]> {
try {
const base_url = import.meta.env.AZURE_OPENAI_API_BASE_URL || "";
if (!base_url) {
return [];
}
const api_key = import.meta.env.AZURE_OPENAI_API_KEY;
const response = await fetch(`${base_url}/openai/deployments?api-version=2023-03-15-preview`, {
[Review comment] @HuynhDoTanThanh would it make sense to make the API Version parametrizable in .env as well? (A hedged sketch of this appears after this file's diff.)
headers: {
'api-key': api_key
}
});
const res = await response.json() as any;

return res.data.map((model: any) => ({
name: model.id,
label: model.id,
provider: 'AzureOpenAI'
}));

} catch (e) {
return [];
}
}


async function initializeModelList(): Promise<void> {
const ollamaModels = await getOllamaModels();
const openAiLikeModels = await getOpenAILikeModels();
MODEL_LIST = [...ollamaModels,...openAiLikeModels, ...staticModels];
const azureOpenAIModels = await getAzureOpenAIModels();
MODEL_LIST = [...ollamaModels,...openAiLikeModels, ...staticModels, ...azureOpenAIModels];
}
initializeModelList().then();
export { getOllamaModels, getOpenAILikeModels, initializeModelList };
export { getOllamaModels, getOpenAILikeModels, getAzureOpenAIModels, initializeModelList };
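Regarding the review comment above about making the API version configurable: a hedged sketch of what that could look like inside app/utils/constants.ts, reusing the module's existing `ModelInfo` type and assuming a hypothetical `AZURE_OPENAI_API_VERSION` variable that is not part of this PR (it would also need to be added to the `envPrefix` list in vite.config.ts, shown further below).

```ts
// Sketch only; AZURE_OPENAI_API_VERSION is a hypothetical variable, not part of this PR.
async function getAzureOpenAIModelsWithConfigurableVersion(): Promise<ModelInfo[]> {
  const base_url = import.meta.env.AZURE_OPENAI_API_BASE_URL || '';
  if (!base_url) {
    return [];
  }
  const api_key = import.meta.env.AZURE_OPENAI_API_KEY;
  // Fall back to the version currently hard-coded in this PR.
  const api_version = import.meta.env.AZURE_OPENAI_API_VERSION || '2023-03-15-preview';
  const response = await fetch(`${base_url}/openai/deployments?api-version=${api_version}`, {
    headers: { 'api-key': api_key },
  });
  const res = (await response.json()) as any;
  return res.data.map((model: any) => ({
    name: model.id,
    label: model.id,
    provider: 'AzureOpenAI',
  }));
}
```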
1 change: 1 addition & 0 deletions package.json
@@ -25,6 +25,7 @@
"@ai-sdk/anthropic": "^0.0.39",
"@ai-sdk/google": "^0.0.52",
"@ai-sdk/openai": "^0.0.66",
"@ai-sdk/azure": "^0.0.48",
"@ai-sdk/mistral": "^0.0.43",
"@codemirror/autocomplete": "^6.17.0",
"@codemirror/commands": "^6.6.0",
28 changes: 28 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion vite.config.ts
@@ -27,7 +27,7 @@ export default defineConfig((config) => {
chrome129IssuePlugin(),
config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
],
envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL"],
envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL", "AZURE_OPENAI_API_"],
css: {
preprocessorOptions: {
scss: {
2 changes: 2 additions & 0 deletions worker-configuration.d.ts
@@ -1,6 +1,8 @@
interface Env {
ANTHROPIC_API_KEY: string;
OPENAI_API_KEY: string;
AZURE_OPENAI_API_BASE_URL: string;
AZURE_OPENAI_API_KEY: string;
GROQ_API_KEY: string;
OPEN_ROUTER_API_KEY: string;
OLLAMA_API_BASE_URL: string;