Skip to content

Commit

Permalink
Merge pull request #78 from guardrails-ai/060_litellm_updates
Browse files Browse the repository at this point in the history
update for 060 litellm callable updates
  • Loading branch information
zsimjee authored Oct 18, 2024
2 parents 94617a2 + 9c4afd3 commit fc7a036
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 16 deletions.
2 changes: 1 addition & 1 deletion guardrails_api/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "0.0.3"
__version__ = "0.1.0-alpha1"  # NOTE(review): not PEP 440 canonical form ("0.1.0a1"); packaging tools will normalize it — confirm this spelling is intended
18 changes: 3 additions & 15 deletions guardrails_api/utils/get_llm_callable.py
Original file line number Diff line number Diff line change
@@ -1,27 +1,15 @@
import litellm
from typing import Any, Awaitable, Callable, Union
from guardrails.utils.openai_utils import (
get_static_openai_create_func,
get_static_openai_chat_create_func,
get_static_openai_acreate_func,
get_static_openai_chat_acreate_func,
)
from guardrails_api_client.models.llm_resource import LLMResource


def get_llm_callable(
    llm_api: str,
) -> Union[Callable, Callable[[Any], Awaitable[Any]]]:
    """Resolve an LLMResource identifier string to the matching litellm callable.

    Args:
        llm_api: The string value of an ``LLMResource`` enum member,
            e.g. ``LLMResource.LITELLM_DOT_COMPLETION.value``.

    Returns:
        ``litellm.completion`` for the synchronous litellm resource,
        ``litellm.acompletion`` for the asynchronous one, or (implicitly)
        ``None`` when ``llm_api`` matches neither value.
    """
    # TODO: Add error handling and throw 400 when llm_api is unrecognized
    # NOTE(review): do we need this dispatch anymore if we're going to use
    # litellm's default handling and only set the model?
    if llm_api == LLMResource.LITELLM_DOT_COMPLETION.value:
        return litellm.completion
    elif llm_api == LLMResource.LITELLM_DOT_ACOMPLETION.value:
        return litellm.acompletion
Expand Down

0 comments on commit fc7a036

Please sign in to comment.