Skip to content

Commit

Permalink
fix error in llama-index-azure-gpt model getter
Browse files — browse the repository at this point in the history
  • Loading branch information
rchan26 committed Jun 12, 2024
1 parent 0d2737b commit 20a3eb5
Show file tree
Hide file tree
Showing 4 changed files with 6 additions and 4 deletions.
2 changes: 1 addition & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ httpx = "^0.27.0"
llama-cpp-python = "^0.2.78"
llama-index = "^0.10.43"
nest_asyncio = "^1.6.0"
openai = "^1.33.0"
openai = "^1.34.0"
pandas = "^2.2.2"
pulumi = { version="^3.100.0", optional=true }
pulumi-azure-native = { version="^2.24.0", optional=true }
Expand Down
4 changes: 2 additions & 2 deletions reginald/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,8 @@ def get_model(model_name: str):

return LlamaIndexHF
case "llama-index-gpt-azure":
from reginald.models.llama_index.llama_index_openai import (
LlamaIndexGPTOpenAI,
from reginald.models.llama_index.llama_index_azure_openai import (
LlamaIndexGPTAzure,
)

return LlamaIndexGPTOpenAI
Expand Down
2 changes: 2 additions & 0 deletions reginald/models/chat_interact.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,8 @@ def run_chat_interact(streaming: bool = False, **kwargs) -> ResponseModel:
message = input(">>> ")
if message in ["exit", "exit()", "quit()", "bye Reginald"]:
return response_model
if message == "":
continue
if message in ["clear_history", "\clear_history"]:
if (
response_model.mode == "chat"
Expand Down

0 comments on commit 20a3eb5

Please sign in to comment.