Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Revert "mypy init + refactor llm import logic + tests" #111

Merged
merged 1 commit into from
Jun 13, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion .github/workflows/static-analysis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,4 +29,3 @@ jobs:
python-version: "3.9"
- name: Run pre-commit
uses: pre-commit/[email protected]

14 changes: 1 addition & 13 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -7,16 +7,4 @@ repos:
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.9.0
hooks:
- id: mypy
additional_dependencies:
- pydantic>=2,<3.0.0
- langchain_core
- langchain_anthropic
- langchain_openai
- langchain_google_genai
files: ^(src/controlflow/llm/models.py)$
args: [--strict]
- id: ruff-format
4 changes: 0 additions & 4 deletions mypy.ini

This file was deleted.

2 changes: 0 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,6 @@ tests = [
"pytest-sugar>=0.9,<2.0",
"pytest>=7.0",
"pytest-timeout",
"pytest_mock",
"pytest-xdist",
"pandas",
]
Expand All @@ -58,7 +57,6 @@ dev = [
"pre-commit",
"ruff>=0.3.4",
"textual-dev",
"mypy",
]

[build-system]
Expand Down
98 changes: 49 additions & 49 deletions src/controlflow/llm/models.py
Original file line number Diff line number Diff line change
@@ -1,61 +1,61 @@
from importlib import import_module
from typing import TYPE_CHECKING, Any, Optional, Union

from langchain_core.language_models import BaseChatModel

import controlflow

if TYPE_CHECKING:
from langchain_anthropic import ChatAnthropic
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_openai import AzureChatOpenAI, ChatOpenAI

# Maps a provider key to the (module_name, class_name) pair implementing its
# chat model. Modules are imported lazily so each optional provider package is
# only required when that provider is actually used.
_model_registry: dict[str, tuple[str, str]] = {
    "openai": ("langchain_openai", "ChatOpenAI"),
    "azure_openai": ("langchain_openai", "AzureChatOpenAI"),
    "anthropic": ("langchain_anthropic", "ChatAnthropic"),
    "google": ("langchain_google_genai", "ChatGoogleGenerativeAI"),
}


def get_provider_from_string(
    provider: str,
) -> Union[
    type["ChatOpenAI"],
    type["AzureChatOpenAI"],
    type["ChatAnthropic"],
    type["ChatGoogleGenerativeAI"],
]:
    """Resolve a provider key (e.g. ``"openai"``) to its chat-model class.

    Args:
        provider: One of the keys of ``_model_registry``.

    Returns:
        The chat-model class for the provider (not an instance).

    Raises:
        ValueError: If ``provider`` is not a known registry key.
        ImportError: If the provider's integration package is not installed.
    """
    module_name, class_name = _model_registry.get(provider, ("openai", ""))
    if not class_name:
        raise ValueError(
            f"Could not load provider automatically: {provider}. Please create your model manually."
        )
    try:
        module = import_module(module_name)
    except ImportError as exc:
        # Chain the original error so the underlying missing-package cause
        # stays visible in the traceback instead of being discarded.
        raise ImportError(
            f"To use {provider} models, please install the `{module_name}` package."
        ) from exc
    return getattr(module, class_name)  # type: ignore[no-any-return]


def get_model_from_string(
    model: Optional[str] = None, temperature: Optional[float] = None, **kwargs: Any
) -> "BaseChatModel":
    """Build a chat model instance from a ``"provider/model"`` string.

    Args:
        model: A ``"provider/model-name"`` string; defaults to
            ``controlflow.settings.llm_model``. A bare model name with no
            ``/`` is treated as an OpenAI model.
        temperature: Sampling temperature; defaults to
            ``controlflow.settings.llm_temperature`` when ``None``.
        **kwargs: Extra keyword arguments forwarded to the model class.

    Raises:
        ValueError: If the provider is unknown.
        ImportError: If the provider's package is not installed.
    """
    provider, _, name = (model or controlflow.settings.llm_model).partition("/")
    if not name:
        # No "/" in the string: the whole string is the model name, not the
        # provider. Default the provider to openai, matching model_from_string.
        provider, name = "openai", provider
    if temperature is None:
        # `is None`, not truthiness: an explicit temperature of 0.0 must not
        # be silently replaced by the settings default.
        temperature = controlflow.settings.llm_temperature
    # Pass `model=` rather than `name=`: `name` is the generic Runnable label
    # and would leave the actual model unset (see model_from_string, which
    # constructs via `cls(model=..., ...)`).
    return get_provider_from_string(provider=provider)(
        model=name,
        temperature=temperature,
        **kwargs,
    )


def get_default_model() -> "BaseChatModel":
    """Return the active default chat model.

    Uses ``controlflow.default_model`` when one has been set; otherwise builds
    a model from the ``controlflow.settings.llm_model`` string.
    """
    # The original hunk was a conflated diff (two return statements followed
    # by a dangling `else:`), which is a syntax error; this is the reconstructed
    # post-revert version that delegates to model_from_string.
    if controlflow.default_model is None:
        return model_from_string(controlflow.settings.llm_model)
    return controlflow.default_model


def model_from_string(
    model: str, temperature: Optional[float] = None, **kwargs: Any
) -> "BaseChatModel":
    """Instantiate a chat model from a ``"provider/model"`` string.

    Args:
        model: ``"provider/model-name"``; a bare model name with no ``/`` is
            treated as an OpenAI model.
        temperature: Sampling temperature; defaults to
            ``controlflow.settings.llm_temperature`` when ``None``.
        **kwargs: Extra keyword arguments forwarded to the model constructor.

    Raises:
        ValueError: If the provider is not one of the supported keys.
        ImportError: If the provider's integration package is not installed.
    """
    if "/" not in model:
        provider = "openai"
    else:
        # partition (not a bare split) so the split only happens when a "/"
        # is present — the original unconditionally ran
        # `provider, model = model.split("/")`, which raised ValueError for a
        # bare model name and for names containing a second "/".
        provider, _, model = model.partition("/")

    if temperature is None:
        temperature = controlflow.settings.llm_temperature

    if provider == "openai":
        try:
            from langchain_openai import ChatOpenAI
        except ImportError:
            raise ImportError(
                "To use OpenAI models, please install the `langchain-openai` package."
            )
        cls = ChatOpenAI
    elif provider == "azure-openai":
        # NOTE(review): this key uses a hyphen while get_provider_from_string's
        # registry uses "azure_openai" — confirm which spelling callers expect.
        try:
            from langchain_openai import AzureChatOpenAI
        except ImportError:
            raise ImportError(
                "To use Azure OpenAI models, please install the `langchain-openai` package."
            )
        cls = AzureChatOpenAI
    elif provider == "anthropic":
        try:
            from langchain_anthropic import ChatAnthropic
        except ImportError:
            raise ImportError(
                "To use Anthropic models, please install the `langchain-anthropic` package."
            )
        cls = ChatAnthropic
    elif provider == "google":
        try:
            from langchain_google_genai import ChatGoogleGenerativeAI
        except ImportError:
            raise ImportError(
                "To use Google models, please install the `langchain_google_genai` package."
            )
        cls = ChatGoogleGenerativeAI
    else:
        raise ValueError(
            f"Could not load provider automatically: {provider}. Please create your model manually."
        )

    return cls(model=model, temperature=temperature, **kwargs)


# Module-level default model slot; initialized to None (no default configured).
DEFAULT_MODEL = None
79 changes: 0 additions & 79 deletions tests/llm/test_models.py

This file was deleted.

Loading