Commit

utils.env, utils.strings
efriis committed Nov 20, 2023
1 parent 3e3b161 commit ea28c47
Showing 162 changed files with 281 additions and 244 deletions.
7 changes: 0 additions & 7 deletions libs/core/langchain_core/utils/__init__.py
@@ -4,15 +4,13 @@
These functions do not depend on any other LangChain module.
"""

from langchain_core.utils.env import get_from_dict_or_env, get_from_env
from langchain_core.utils.formatting import StrictFormatter, formatter
from langchain_core.utils.input import (
get_bolded_text,
get_color_mapping,
get_colored_text,
print_text,
)
from langchain_core.utils.strings import comma_list, stringify_dict, stringify_value
from langchain_core.utils.utils import (
check_package_version,
convert_to_secret_str,
@@ -26,20 +24,15 @@
__all__ = [
"StrictFormatter",
"check_package_version",
"comma_list",
"convert_to_secret_str",
"formatter",
"get_bolded_text",
"get_color_mapping",
"get_colored_text",
"get_from_dict_or_env",
"get_from_env",
"get_pydantic_field_names",
"guard_import",
"mock_now",
"print_text",
"raise_for_status_with_text",
"stringify_dict",
"stringify_value",
"xor_args",
]
26 changes: 0 additions & 26 deletions libs/core/langchain_core/utils/env.py

This file was deleted.
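For context on what is deleted here, utils/env.py holds two small helpers for resolving a setting from keyword arguments or environment variables. The sketch below approximates their behavior from the public API; it is not the deleted source, and exact signatures and error messages may differ.

import os
from typing import Any, Dict, Optional


def get_from_env(key: str, env_key: str, default: Optional[str] = None) -> str:
    """Return the value of `env_key` from the environment, or `default`."""
    if env_key in os.environ and os.environ[env_key]:
        return os.environ[env_key]
    if default is not None:
        return default
    raise ValueError(
        f"Did not find {key}, please add an environment variable `{env_key}` "
        f"which contains it, or pass `{key}` as a named parameter."
    )


def get_from_dict_or_env(
    data: Dict[str, Any], key: str, env_key: str, default: Optional[str] = None
) -> str:
    """Prefer an explicit value in `data`, then fall back to the environment."""
    if key in data and data[key]:
        return data[key]
    return get_from_env(key, env_key, default=default)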

40 changes: 0 additions & 40 deletions libs/core/langchain_core/utils/strings.py

This file was deleted.
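Similarly, utils/strings.py carries three small formatting helpers. The following is a rough sketch of what they do, reconstructed from how callers use them rather than copied from the deleted file.

from typing import Any, Dict, List


def comma_list(items: List[Any]) -> str:
    """Join items into a single comma-separated string."""
    return ", ".join(str(item) for item in items)


def stringify_value(val: Any) -> str:
    """Render a value as text, recursing into dicts and lists."""
    if isinstance(val, str):
        return val
    if isinstance(val, dict):
        return "\n" + stringify_dict(val)
    if isinstance(val, list):
        return "\n".join(stringify_value(v) for v in val)
    return str(val)


def stringify_dict(data: Dict[str, Any]) -> str:
    """Render a dict as `key: value` lines."""
    return "".join(f"{key}: {stringify_value(value)}\n" for key, value in data.items())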

5 changes: 0 additions & 5 deletions libs/core/tests/unit_tests/utils/test_imports.py
@@ -3,21 +3,16 @@
EXPECTED_ALL = [
"StrictFormatter",
"check_package_version",
"comma_list",
"convert_to_secret_str",
"formatter",
"get_bolded_text",
"get_color_mapping",
"get_colored_text",
"get_from_dict_or_env",
"get_from_env",
"get_pydantic_field_names",
"guard_import",
"mock_now",
"print_text",
"raise_for_status_with_text",
"stringify_dict",
"stringify_value",
"xor_args",
]

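The EXPECTED_ALL list above pins the public surface of langchain_core.utils. The test body itself is not shown in this diff; a minimal sketch of how such a snapshot test is usually written, under that assumption:

from langchain_core import utils

EXPECTED_ALL = [
    "StrictFormatter",
    "check_package_version",
    # ... the remaining names pinned above ...
    "xor_args",
]


def test_all_imports() -> None:
    # The module must export exactly the pinned names, nothing more and nothing less.
    assert set(utils.__all__) == set(EXPECTED_ALL)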
2 changes: 1 addition & 1 deletion libs/langchain/langchain/cache.py
@@ -56,9 +56,9 @@
from langchain_core.schema import ChatGeneration, Generation
from langchain_core.schema.cache import RETURN_VAL_TYPE, BaseCache
from langchain_core.schema.embeddings import Embeddings
from langchain_core.utils import get_from_env

from langchain.llms.base import LLM, get_prompts
from langchain.utils import get_from_env
from langchain.vectorstores.redis import Redis as RedisVectorstore

logger = logging.getLogger(__file__)
2 changes: 1 addition & 1 deletion libs/langchain/langchain/callbacks/mlflow_callback.py
@@ -8,7 +8,6 @@
from typing import Any, Dict, List, Optional, Union

from langchain_core.schema import AgentAction, AgentFinish, LLMResult
from langchain_core.utils import get_from_dict_or_env

from langchain.callbacks.base import BaseCallbackHandler
from langchain.callbacks.utils import (
@@ -19,6 +18,7 @@
import_spacy,
import_textstat,
)
from langchain.utils import get_from_dict_or_env


def import_mlflow() -> Any:
3 changes: 1 addition & 2 deletions libs/langchain/langchain/callbacks/whylabs_callback.py
@@ -3,9 +3,8 @@
import logging
from typing import TYPE_CHECKING, Any, Optional

from langchain_core.utils import get_from_env

from langchain.callbacks.base import BaseCallbackHandler
from langchain.utils import get_from_env

if TYPE_CHECKING:
from whylogs.api.logger.logger import Logger
2 changes: 1 addition & 1 deletion libs/langchain/langchain/chains/moderation.py
@@ -2,10 +2,10 @@
from typing import Any, Dict, List, Optional

from langchain_core.pydantic_v1 import root_validator
from langchain_core.utils import get_from_dict_or_env

from langchain.callbacks.manager import CallbackManagerForChainRun
from langchain.chains.base import Chain
from langchain.utils import get_from_dict_or_env


class OpenAIModerationChain(Chain):
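The remaining files in this commit follow the same pattern as cache.py and moderation.py above: the import line that provides get_from_dict_or_env (or get_from_env) switches between langchain_core.utils and langchain.utils, while call sites are left untouched. In these chains and chat models the helper is typically called inside a pydantic root_validator to resolve an API key from constructor kwargs or the environment. A hedged sketch of that usage (the class and field names below are illustrative, not taken from this diff):

from typing import Any, Dict, Optional

from langchain_core.pydantic_v1 import BaseModel, root_validator

from langchain.utils import get_from_dict_or_env


class ExampleClient(BaseModel):
    """Illustrative stand-in; the real classes subclass Chain or BaseChatModel."""

    openai_api_key: Optional[str] = None

    @root_validator()
    def validate_environment(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        # Use an explicitly passed key if present, otherwise fall back to the
        # OPENAI_API_KEY environment variable.
        values["openai_api_key"] = get_from_dict_or_env(
            values, "openai_api_key", "OPENAI_API_KEY"
        )
        return values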
3 changes: 2 additions & 1 deletion libs/langchain/langchain/chat_models/anyscale.py
@@ -9,14 +9,15 @@
import requests
from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
from langchain_core.schema.messages import BaseMessage
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_core.utils import convert_to_secret_str
from langchain_core.utils.openai import is_openai_v1

from langchain.adapters.openai import convert_message_to_dict
from langchain.chat_models.openai import (
ChatOpenAI,
_import_tiktoken,
)
from langchain.utils import get_from_dict_or_env

if TYPE_CHECKING:
import tiktoken
2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/azure_openai.py
@@ -8,10 +8,10 @@

from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
from langchain_core.schema import ChatResult
from langchain_core.utils import get_from_dict_or_env
from langchain_core.utils.openai import is_openai_v1

from langchain.chat_models.openai import ChatOpenAI
from langchain.utils import get_from_dict_or_env

logger = logging.getLogger(__name__)

3 changes: 2 additions & 1 deletion libs/langchain/langchain/chat_models/azureml_endpoint.py
@@ -9,11 +9,12 @@
HumanMessage,
SystemMessage,
)
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_core.utils import convert_to_secret_str

from langchain.callbacks.manager import CallbackManagerForLLMRun
from langchain.chat_models.base import SimpleChatModel
from langchain.llms.azureml_endpoint import AzureMLEndpointClient, ContentFormatterBase
from langchain.utils import get_from_dict_or_env


class LlamaContentFormatter(ContentFormatterBase):
2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/baichuan.py
@@ -23,12 +23,12 @@
from langchain_core.schema.output import ChatGenerationChunk
from langchain_core.utils import (
convert_to_secret_str,
get_from_dict_or_env,
get_pydantic_field_names,
)

from langchain.callbacks.manager import CallbackManagerForLLMRun
from langchain.chat_models.base import BaseChatModel, _generate_from_stream
from langchain.utils import get_from_dict_or_env

logger = logging.getLogger(__name__)

(next changed file; path not shown in this view)
@@ -15,13 +15,13 @@
SystemMessage,
)
from langchain_core.schema.output import ChatGenerationChunk
from langchain_core.utils import get_from_dict_or_env

from langchain.callbacks.manager import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
)
from langchain.chat_models.base import BaseChatModel
from langchain.utils import get_from_dict_or_env

logger = logging.getLogger(__name__)

2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/ernie.py
@@ -13,10 +13,10 @@
ChatResult,
HumanMessage,
)
from langchain_core.utils import get_from_dict_or_env

from langchain.callbacks.manager import CallbackManagerForLLMRun
from langchain.chat_models.base import BaseChatModel
from langchain.utils import get_from_dict_or_env

logger = logging.getLogger(__name__)

2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/everlyai.py
@@ -7,13 +7,13 @@

from langchain_core.pydantic_v1 import Field, root_validator
from langchain_core.schema.messages import BaseMessage
from langchain_core.utils import get_from_dict_or_env

from langchain.adapters.openai import convert_message_to_dict
from langchain.chat_models.openai import (
ChatOpenAI,
_import_tiktoken,
)
from langchain.utils import get_from_dict_or_env

if TYPE_CHECKING:
import tiktoken
2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/google_palm.py
@@ -16,7 +16,6 @@
HumanMessage,
SystemMessage,
)
from langchain_core.utils import get_from_dict_or_env
from tenacity import (
before_sleep_log,
retry,
@@ -30,6 +29,7 @@
CallbackManagerForLLMRun,
)
from langchain.chat_models.base import BaseChatModel
from langchain.utils import get_from_dict_or_env

if TYPE_CHECKING:
import google.generativeai as genai
3 changes: 2 additions & 1 deletion libs/langchain/langchain/chat_models/jinachat.py
@@ -35,7 +35,7 @@
SystemMessageChunk,
)
from langchain_core.schema.output import ChatGenerationChunk
from langchain_core.utils import get_from_dict_or_env, get_pydantic_field_names
from langchain_core.utils import get_pydantic_field_names
from tenacity import (
before_sleep_log,
retry,
Expand All @@ -53,6 +53,7 @@
_agenerate_from_stream,
_generate_from_stream,
)
from langchain.utils import get_from_dict_or_env

logger = logging.getLogger(__name__)

2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/konko.py
@@ -20,14 +20,14 @@
from langchain_core.schema import ChatGeneration, ChatResult
from langchain_core.schema.messages import AIMessageChunk, BaseMessage
from langchain_core.schema.output import ChatGenerationChunk
from langchain_core.utils import get_from_dict_or_env

from langchain.adapters.openai import convert_dict_to_message, convert_message_to_dict
from langchain.callbacks.manager import (
CallbackManagerForLLMRun,
)
from langchain.chat_models.base import BaseChatModel, _generate_from_stream
from langchain.chat_models.openai import _convert_delta_to_message_chunk
from langchain.utils import get_from_dict_or_env

DEFAULT_API_BASE = "https://api.konko.ai/v1"
DEFAULT_MODEL = "meta-llama/Llama-2-13b-chat-hf"
2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/litellm.py
@@ -36,7 +36,6 @@
SystemMessageChunk,
)
from langchain_core.schema.output import ChatGenerationChunk
from langchain_core.utils import get_from_dict_or_env

from langchain.callbacks.manager import (
AsyncCallbackManagerForLLMRun,
@@ -48,6 +47,7 @@
_generate_from_stream,
)
from langchain.llms.base import create_base_retry_decorator
from langchain.utils import get_from_dict_or_env

logger = logging.getLogger(__name__)

2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/openai.py
@@ -36,7 +36,6 @@
)
from langchain_core.schema.output import ChatGenerationChunk
from langchain_core.utils import (
get_from_dict_or_env,
get_pydantic_field_names,
)
from langchain_core.utils.openai import is_openai_v1
@@ -52,6 +51,7 @@
_generate_from_stream,
)
from langchain.llms.base import create_base_retry_decorator
from langchain.utils import get_from_dict_or_env

if TYPE_CHECKING:
import tiktoken
2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/pai_eas_endpoint.py
@@ -16,14 +16,14 @@
SystemMessage,
)
from langchain_core.schema.output import ChatGenerationChunk
from langchain_core.utils import get_from_dict_or_env

from langchain.callbacks.manager import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
)
from langchain.chat_models.base import BaseChatModel
from langchain.llms.utils import enforce_stop_tokens
from langchain.utils import get_from_dict_or_env

logger = logging.getLogger(__name__)

2 changes: 1 addition & 1 deletion libs/langchain/langchain/chat_models/tongyi.py
@@ -30,7 +30,6 @@
SystemMessageChunk,
)
from langchain_core.schema.output import ChatGenerationChunk, GenerationChunk
from langchain_core.utils import get_from_dict_or_env
from requests.exceptions import HTTPError
from tenacity import (
RetryCallState,
@@ -45,6 +44,7 @@
BaseChatModel,
_generate_from_stream,
)
from langchain.utils import get_from_dict_or_env

logger = logging.getLogger(__name__)

0 comments on commit ea28c47
