Commit

Merge pull request #136 from cvgore/patch-1
unable to start ai-service when trying to use public OpenAI
pauldotyu authored Apr 10, 2024
2 parents 1f7ba8b + 3f62f95 commit c1ce9f8
Showing 1 changed file with 4 additions and 3 deletions.
src/ai-service/routers/LLM.py (7 changes: 4 additions & 3 deletions)
@@ -11,7 +11,8 @@ def get_llm():
     useLocalLLM: bool = False
     useAzureOpenAI: bool = False
     kernel = False
-
+    endpoint: str = ''
+
     if os.environ.get("USE_LOCAL_LLM"):
         useLocalLLM = os.environ.get("USE_LOCAL_LLM").lower() == "true"
 
@@ -24,7 +25,7 @@ def get_llm():
 
     # if useLocalLLM or useAzureOpenAI are set to true, get the endpoint from the environment variables
     if useLocalLLM or useAzureOpenAI:
-        endpoint: str = os.environ.get("AI_ENDPOINT") or os.environ.get("AZURE_OPENAI_ENDPOINT")
+        endpoint = os.environ.get("AI_ENDPOINT") or os.environ.get("AZURE_OPENAI_ENDPOINT")
 
         if isinstance(endpoint, str) == False or endpoint == "":
             raise Exception("AI_ENDPOINT or AZURE_OPENAI_ENDPOINT environment variable must be set when USE_LOCAL_LLM or USE_AZURE_OPENAI is set to true")
@@ -67,4 +68,4 @@ def get_llm():
         else:
             print("Authenticating to Azure OpenAI with OpenAI API key")
             kernel.add_chat_service("dv", AzureChatCompletion(deployment_name=deployment, endpoint=endpoint, api_key=api_key))
-    return kernel, useLocalLLM, endpoint
+    return kernel, useLocalLLM, endpoint
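
Why the one-line initialization matters: before this patch, endpoint was only assigned inside the `if useLocalLLM or useAzureOpenAI:` branch, yet it is referenced unconditionally in the final `return kernel, useLocalLLM, endpoint`. When the service is configured for public OpenAI (neither USE_LOCAL_LLM nor USE_AZURE_OPENAI set to true), that branch is skipped and the return statement hits an unbound local name, which is the most likely reason the ai-service could not start. Declaring `endpoint: str = ''` up front gives the public OpenAI path a defined (empty) value, and the now-redundant `: str` annotation is dropped from the reassignment inside the branch. A minimal sketch of the failure mode follows; buggy_endpoint and fixed_endpoint are simplified stand-ins for illustration, not the actual get_llm() code.

import os

def buggy_endpoint():
    # Mirrors the pre-patch flow: 'endpoint' is only bound inside the branch.
    use_local = os.environ.get("USE_LOCAL_LLM", "").lower() == "true"
    use_azure = os.environ.get("USE_AZURE_OPENAI", "").lower() == "true"
    if use_local or use_azure:
        endpoint = os.environ.get("AI_ENDPOINT") or os.environ.get("AZURE_OPENAI_ENDPOINT")
    # Public OpenAI path (both flags false): 'endpoint' was never assigned,
    # so this line raises UnboundLocalError.
    return endpoint

def fixed_endpoint():
    # Mirrors the patched flow: 'endpoint' always has a defined value.
    endpoint: str = ''
    use_local = os.environ.get("USE_LOCAL_LLM", "").lower() == "true"
    use_azure = os.environ.get("USE_AZURE_OPENAI", "").lower() == "true"
    if use_local or use_azure:
        endpoint = os.environ.get("AI_ENDPOINT") or os.environ.get("AZURE_OPENAI_ENDPOINT")
    return endpoint  # '' when neither flag is set

With neither environment variable set, buggy_endpoint() raises UnboundLocalError at the return, while fixed_endpoint() returns an empty string and lets the caller proceed.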
