diff --git a/libs/core/langchain_core/_api/__init__.py b/libs/core/langchain_core/_api/__init__.py index e013a72129f82..6ae169cc393be 100644 --- a/libs/core/langchain_core/_api/__init__.py +++ b/libs/core/langchain_core/_api/__init__.py @@ -16,9 +16,12 @@ surface_langchain_deprecation_warnings, warn_deprecated, ) +from .path import as_import_path, get_relative_path __all__ = [ + "as_import_path", "deprecated", + "get_relative_path", "LangChainDeprecationWarning", "suppress_langchain_deprecation_warning", "surface_langchain_deprecation_warnings", diff --git a/libs/core/langchain_core/schema/agent.py b/libs/core/langchain_core/agents.py similarity index 98% rename from libs/core/langchain_core/schema/agent.py rename to libs/core/langchain_core/agents.py index 94d9d60dc1823..e9b3ab3f5f974 100644 --- a/libs/core/langchain_core/schema/agent.py +++ b/libs/core/langchain_core/agents.py @@ -3,7 +3,7 @@ from typing import Any, Literal, Sequence, Union from langchain_core.load.serializable import Serializable -from langchain_core.schema.messages import BaseMessage +from langchain_core.messages import BaseMessage class AgentAction(Serializable): diff --git a/libs/core/langchain_core/schema/cache.py b/libs/core/langchain_core/caches.py similarity index 92% rename from libs/core/langchain_core/schema/cache.py rename to libs/core/langchain_core/caches.py index fe132c5728d12..c14959c8f9c6b 100644 --- a/libs/core/langchain_core/schema/cache.py +++ b/libs/core/langchain_core/caches.py @@ -3,7 +3,7 @@ from abc import ABC, abstractmethod from typing import Any, Optional, Sequence -from langchain_core.schema.output import Generation +from langchain_core.outputs import Generation RETURN_VAL_TYPE = Sequence[Generation] diff --git a/libs/core/langchain_core/callbacks/__init__.py b/libs/core/langchain_core/callbacks/__init__.py index e69de29bb2d1d..e6da4809b4ea9 100644 --- a/libs/core/langchain_core/callbacks/__init__.py +++ b/libs/core/langchain_core/callbacks/__init__.py @@ -0,0 +1,69 @@ +from langchain_core.callbacks.base import ( + AsyncCallbackHandler, + BaseCallbackHandler, + BaseCallbackManager, + CallbackManagerMixin, + Callbacks, + ChainManagerMixin, + LLMManagerMixin, + RetrieverManagerMixin, + RunManagerMixin, + ToolManagerMixin, +) +from langchain_core.callbacks.manager import ( + AsyncCallbackManager, + AsyncCallbackManagerForChainGroup, + AsyncCallbackManagerForChainRun, + AsyncCallbackManagerForLLMRun, + AsyncCallbackManagerForRetrieverRun, + AsyncCallbackManagerForToolRun, + AsyncParentRunManager, + AsyncRunManager, + BaseRunManager, + CallbackManager, + CallbackManagerForChainGroup, + CallbackManagerForChainRun, + CallbackManagerForLLMRun, + CallbackManagerForRetrieverRun, + CallbackManagerForToolRun, + ParentRunManager, + RunManager, + env_var_is_set, + register_configure_hook, +) +from langchain_core.callbacks.stdout import StdOutCallbackHandler +from langchain_core.callbacks.streaming_stdout import StreamingStdOutCallbackHandler + +__all__ = [ + "RetrieverManagerMixin", + "LLMManagerMixin", + "ChainManagerMixin", + "ToolManagerMixin", + "Callbacks", + "CallbackManagerMixin", + "RunManagerMixin", + "BaseCallbackHandler", + "AsyncCallbackHandler", + "BaseCallbackManager", + "BaseRunManager", + "RunManager", + "ParentRunManager", + "AsyncRunManager", + "AsyncParentRunManager", + "CallbackManagerForLLMRun", + "AsyncCallbackManagerForLLMRun", + "CallbackManagerForChainRun", + "AsyncCallbackManagerForChainRun", + "CallbackManagerForToolRun", + "AsyncCallbackManagerForToolRun", + 
"CallbackManagerForRetrieverRun", + "AsyncCallbackManagerForRetrieverRun", + "CallbackManager", + "CallbackManagerForChainGroup", + "AsyncCallbackManager", + "AsyncCallbackManagerForChainGroup", + "StdOutCallbackHandler", + "StreamingStdOutCallbackHandler", + "env_var_is_set", + "register_configure_hook", +] diff --git a/libs/core/langchain_core/callbacks/base.py b/libs/core/langchain_core/callbacks/base.py index 030e5ecbaf3a0..b3d86648d6d57 100644 --- a/libs/core/langchain_core/callbacks/base.py +++ b/libs/core/langchain_core/callbacks/base.py @@ -6,10 +6,10 @@ from tenacity import RetryCallState -from langchain_core.schema.agent import AgentAction, AgentFinish -from langchain_core.schema.document import Document -from langchain_core.schema.messages import BaseMessage -from langchain_core.schema.output import ChatGenerationChunk, GenerationChunk, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.documents import Document +from langchain_core.messages import BaseMessage +from langchain_core.outputs import ChatGenerationChunk, GenerationChunk, LLMResult class RetrieverManagerMixin: diff --git a/libs/core/langchain_core/callbacks/manager.py b/libs/core/langchain_core/callbacks/manager.py index efd4d5550afc7..1ae22a703b7b1 100644 --- a/libs/core/langchain_core/callbacks/manager.py +++ b/libs/core/langchain_core/callbacks/manager.py @@ -30,6 +30,10 @@ from langsmith.run_helpers import get_run_tree_context from tenacity import RetryCallState +from langchain_core.agents import ( + AgentAction, + AgentFinish, +) from langchain_core.callbacks.base import ( BaseCallbackHandler, BaseCallbackManager, @@ -41,23 +45,16 @@ ToolManagerMixin, ) from langchain_core.callbacks.stdout import StdOutCallbackHandler -from langchain_core.callbacks.tracers import run_collector -from langchain_core.callbacks.tracers.langchain import ( +from langchain_core.documents import Document +from langchain_core.messages import BaseMessage, get_buffer_string +from langchain_core.outputs import ChatGenerationChunk, GenerationChunk, LLMResult +from langchain_core.tracers import run_collector +from langchain_core.tracers.langchain import ( LangChainTracer, ) -from langchain_core.callbacks.tracers.langchain_v1 import ( - LangChainTracerV1, - TracerSessionV1, -) -from langchain_core.callbacks.tracers.stdout import ConsoleCallbackHandler -from langchain_core.schema import ( - AgentAction, - AgentFinish, - Document, - LLMResult, -) -from langchain_core.schema.messages import BaseMessage, get_buffer_string -from langchain_core.schema.output import ChatGenerationChunk, GenerationChunk +from langchain_core.tracers.langchain_v1 import LangChainTracerV1 +from langchain_core.tracers.schemas import TracerSessionV1 +from langchain_core.tracers.stdout import ConsoleCallbackHandler if TYPE_CHECKING: from langsmith import Client as LangSmithClient diff --git a/libs/core/langchain_core/callbacks/stdout.py b/libs/core/langchain_core/callbacks/stdout.py index 85b61ec40eee7..e0f256efc9307 100644 --- a/libs/core/langchain_core/callbacks/stdout.py +++ b/libs/core/langchain_core/callbacks/stdout.py @@ -1,9 +1,10 @@ """Callback Handler that prints to std out.""" from typing import Any, Dict, List, Optional +from langchain_core.agents import AgentAction, AgentFinish from langchain_core.callbacks.base import BaseCallbackHandler -from langchain_core.schema import AgentAction, AgentFinish, LLMResult -from langchain_core.utils.input import print_text +from langchain_core.outputs import LLMResult +from 
langchain_core.utils import print_text class StdOutCallbackHandler(BaseCallbackHandler): diff --git a/libs/core/langchain_core/callbacks/streaming_stdout.py b/libs/core/langchain_core/callbacks/streaming_stdout.py index a678e836206aa..5f701219ab253 100644 --- a/libs/core/langchain_core/callbacks/streaming_stdout.py +++ b/libs/core/langchain_core/callbacks/streaming_stdout.py @@ -2,9 +2,10 @@ import sys from typing import Any, Dict, List +from langchain_core.agents import AgentAction, AgentFinish from langchain_core.callbacks.base import BaseCallbackHandler -from langchain_core.schema import AgentAction, AgentFinish, LLMResult -from langchain_core.schema.messages import BaseMessage +from langchain_core.messages import BaseMessage +from langchain_core.outputs import LLMResult class StreamingStdOutCallbackHandler(BaseCallbackHandler): diff --git a/libs/core/langchain_core/schema/chat_history.py b/libs/core/langchain_core/chat_history.py similarity index 96% rename from libs/core/langchain_core/schema/chat_history.py rename to libs/core/langchain_core/chat_history.py index d3e74e688242c..5cc052c6e5726 100644 --- a/libs/core/langchain_core/schema/chat_history.py +++ b/libs/core/langchain_core/chat_history.py @@ -3,7 +3,7 @@ from abc import ABC, abstractmethod from typing import List -from langchain_core.schema.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage class BaseChatMessageHistory(ABC): diff --git a/libs/core/langchain_core/schema/chat.py b/libs/core/langchain_core/chat_sessions.py similarity index 88% rename from libs/core/langchain_core/schema/chat.py rename to libs/core/langchain_core/chat_sessions.py index 83c0789f1fe2c..829bb14971bc7 100644 --- a/libs/core/langchain_core/schema/chat.py +++ b/libs/core/langchain_core/chat_sessions.py @@ -1,6 +1,6 @@ from typing import Sequence, TypedDict -from langchain_core.schema import BaseMessage +from langchain_core.messages import BaseMessage class ChatSession(TypedDict, total=False): diff --git a/libs/core/langchain_core/schema/document.py b/libs/core/langchain_core/document_transformers.py similarity index 78% rename from libs/core/langchain_core/schema/document.py rename to libs/core/langchain_core/document_transformers.py index 448e31532d125..908cb6ecaf685 100644 --- a/libs/core/langchain_core/schema/document.py +++ b/libs/core/langchain_core/document_transformers.py @@ -3,27 +3,9 @@ import asyncio from abc import ABC, abstractmethod from functools import partial -from typing import Any, Literal, Sequence +from typing import Any, Sequence -from langchain_core.load.serializable import Serializable -from langchain_core.pydantic_v1 import Field - - -class Document(Serializable): - """Class for storing a piece of text and associated metadata.""" - - page_content: str - """String text.""" - metadata: dict = Field(default_factory=dict) - """Arbitrary metadata about the page content (e.g., source, relationships to other - documents, etc.). 
- """ - type: Literal["Document"] = "Document" - - @classmethod - def is_lc_serializable(cls) -> bool: - """Return whether this class is serializable.""" - return True +from langchain_core.documents import Document class BaseDocumentTransformer(ABC): diff --git a/libs/core/langchain_core/documents.py b/libs/core/langchain_core/documents.py new file mode 100644 index 0000000000000..40b8498e78671 --- /dev/null +++ b/libs/core/langchain_core/documents.py @@ -0,0 +1,23 @@ +from __future__ import annotations + +from typing import Literal + +from langchain_core.load.serializable import Serializable +from langchain_core.pydantic_v1 import Field + + +class Document(Serializable): + """Class for storing a piece of text and associated metadata.""" + + page_content: str + """String text.""" + metadata: dict = Field(default_factory=dict) + """Arbitrary metadata about the page content (e.g., source, relationships to other + documents, etc.). + """ + type: Literal["Document"] = "Document" + + @classmethod + def is_lc_serializable(cls) -> bool: + """Return whether this class is serializable.""" + return True diff --git a/libs/core/langchain_core/schema/embeddings.py b/libs/core/langchain_core/embeddings.py similarity index 100% rename from libs/core/langchain_core/schema/embeddings.py rename to libs/core/langchain_core/embeddings.py diff --git a/libs/core/langchain_core/prompts/example_selector/__init__.py b/libs/core/langchain_core/example_selectors/__init__.py similarity index 54% rename from libs/core/langchain_core/prompts/example_selector/__init__.py rename to libs/core/langchain_core/example_selectors/__init__.py index 02eeaf00f7c5e..c87f7701601b8 100644 --- a/libs/core/langchain_core/prompts/example_selector/__init__.py +++ b/libs/core/langchain_core/example_selectors/__init__.py @@ -1,14 +1,18 @@ """Logic for selecting examples to include in prompts.""" -from langchain_core.prompts.example_selector.length_based import ( +from langchain_core.example_selectors.base import BaseExampleSelector +from langchain_core.example_selectors.length_based import ( LengthBasedExampleSelector, ) -from langchain_core.prompts.example_selector.semantic_similarity import ( +from langchain_core.example_selectors.semantic_similarity import ( MaxMarginalRelevanceExampleSelector, SemanticSimilarityExampleSelector, + sorted_values, ) __all__ = [ + "BaseExampleSelector", "LengthBasedExampleSelector", "MaxMarginalRelevanceExampleSelector", "SemanticSimilarityExampleSelector", + "sorted_values", ] diff --git a/libs/core/langchain_core/prompts/example_selector/base.py b/libs/core/langchain_core/example_selectors/base.py similarity index 100% rename from libs/core/langchain_core/prompts/example_selector/base.py rename to libs/core/langchain_core/example_selectors/base.py diff --git a/libs/core/langchain_core/prompts/example_selector/length_based.py b/libs/core/langchain_core/example_selectors/length_based.py similarity index 96% rename from libs/core/langchain_core/prompts/example_selector/length_based.py rename to libs/core/langchain_core/example_selectors/length_based.py index 0604461d6e171..e7d0fd872bfe9 100644 --- a/libs/core/langchain_core/prompts/example_selector/length_based.py +++ b/libs/core/langchain_core/example_selectors/length_based.py @@ -2,7 +2,7 @@ import re from typing import Callable, Dict, List -from langchain_core.prompts.example_selector.base import BaseExampleSelector +from langchain_core.example_selectors.base import BaseExampleSelector from langchain_core.prompts.prompt import PromptTemplate from 
langchain_core.pydantic_v1 import BaseModel, validator diff --git a/libs/core/langchain_core/prompts/example_selector/semantic_similarity.py b/libs/core/langchain_core/example_selectors/semantic_similarity.py similarity index 97% rename from libs/core/langchain_core/prompts/example_selector/semantic_similarity.py rename to libs/core/langchain_core/example_selectors/semantic_similarity.py index d0b7435c8aae8..1ab1c65f8fe86 100644 --- a/libs/core/langchain_core/prompts/example_selector/semantic_similarity.py +++ b/libs/core/langchain_core/example_selectors/semantic_similarity.py @@ -3,10 +3,10 @@ from typing import Any, Dict, List, Optional, Type -from langchain_core.prompts.example_selector.base import BaseExampleSelector +from langchain_core.embeddings import Embeddings +from langchain_core.example_selectors.base import BaseExampleSelector from langchain_core.pydantic_v1 import BaseModel, Extra -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore def sorted_values(values: Dict[str, str]) -> List[Any]: diff --git a/libs/core/langchain_core/exceptions.py b/libs/core/langchain_core/exceptions.py new file mode 100644 index 0000000000000..a2271da1a1558 --- /dev/null +++ b/libs/core/langchain_core/exceptions.py @@ -0,0 +1,48 @@ +from typing import Any, Optional + + +class LangChainException(Exception): + """General LangChain exception.""" + + +class TracerException(LangChainException): + """Base class for exceptions in tracers module.""" + + +class OutputParserException(ValueError, LangChainException): + """Exception that output parsers should raise to signify a parsing error. + + This exists to differentiate parsing errors from other code or execution errors + that also may arise inside the output parser. OutputParserExceptions will be + available to catch and handle in ways to fix the parsing error, while other + errors will be raised. + + Args: + error: The error that's being re-raised or an error message. + observation: String explanation of error which can be passed to a + model to try and remediate the issue. + llm_output: String model output which is error-ing. + send_to_llm: Whether to send the observation and llm_output back to an Agent + after an OutputParserException has been raised. This gives the underlying + model driving the agent the context that the previous output was improperly + structured, in the hopes that it will update the output to the correct + format. + """ + + def __init__( + self, + error: Any, + observation: Optional[str] = None, + llm_output: Optional[str] = None, + send_to_llm: bool = False, + ): + super(OutputParserException, self).__init__(error) + if send_to_llm: + if observation is None or llm_output is None: + raise ValueError( + "Arguments 'observation' & 'llm_output'" + " are required if 'send_to_llm' is True" + ) + self.observation = observation + self.llm_output = llm_output + self.send_to_llm = send_to_llm diff --git a/libs/core/langchain_core/globals/__init__.py b/libs/core/langchain_core/globals/__init__.py index da625899bab28..923de3c2e425e 100644 --- a/libs/core/langchain_core/globals/__init__.py +++ b/libs/core/langchain_core/globals/__init__.py @@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Optional if TYPE_CHECKING: - from langchain_core.schema import BaseCache + from langchain_core.caches import BaseCache # DO NOT USE THESE VALUES DIRECTLY! 
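The renames above replace the old langchain_core.schema.* layout with dedicated top-level modules (agents, caches, callbacks, documents, messages, outputs, ...). As a rough, illustrative sketch of how downstream imports shift under this change (paths taken from the hunks above, not from a published migration guide):

    # Old layout (pre-refactor):
    #   from langchain_core.schema.document import Document
    #   from langchain_core.schema.messages import BaseMessage
    #   from langchain_core.schema.output import Generation
    # New layout, per the renames in this diff:
    from langchain_core.documents import Document
    from langchain_core.messages import BaseMessage
    from langchain_core.outputs import Generation

    # Document keeps the same fields it had under langchain_core.schema.document
    doc = Document(page_content="hello", metadata={"source": "example.txt"})
    print(doc.type)  # "Document" -- the Literal tag used for serialization
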
diff --git a/libs/core/langchain_core/language_models/__init__.py b/libs/core/langchain_core/language_models/__init__.py new file mode 100644 index 0000000000000..c83892a2e9674 --- /dev/null +++ b/libs/core/langchain_core/language_models/__init__.py @@ -0,0 +1,12 @@ +from langchain_core.language_models.base import BaseLanguageModel, LanguageModelInput +from langchain_core.language_models.chat_models import BaseChatModel, SimpleChatModel +from langchain_core.language_models.llms import LLM, BaseLLM + +__all__ = [ + "BaseLanguageModel", + "BaseChatModel", + "SimpleChatModel", + "BaseLLM", + "LLM", + "LanguageModelInput", +] diff --git a/libs/core/langchain_core/schema/language_model.py b/libs/core/langchain_core/language_models/base.py similarity index 97% rename from libs/core/langchain_core/schema/language_model.py rename to libs/core/langchain_core/language_models/base.py index df22e8b327654..b15e2166221ba 100644 --- a/libs/core/langchain_core/schema/language_model.py +++ b/libs/core/langchain_core/language_models/base.py @@ -15,14 +15,14 @@ from typing_extensions import TypeAlias +from langchain_core.messages import AnyMessage, BaseMessage, get_buffer_string +from langchain_core.outputs import LLMResult +from langchain_core.prompts import PromptValue from langchain_core.runnables import RunnableSerializable -from langchain_core.schema.messages import AnyMessage, BaseMessage, get_buffer_string -from langchain_core.schema.output import LLMResult -from langchain_core.schema.prompt import PromptValue from langchain_core.utils import get_pydantic_field_names if TYPE_CHECKING: - from langchain_core.callbacks.manager import Callbacks + from langchain_core.callbacks import Callbacks @lru_cache(maxsize=None) # Cache the tokenizer @@ -74,8 +74,8 @@ class BaseLanguageModel( @property def InputType(self) -> TypeAlias: """Get the input type for this runnable.""" - from langchain_core.prompts.base import StringPromptValue from langchain_core.prompts.chat import ChatPromptValueConcrete + from langchain_core.prompts.string import StringPromptValue # This is a version of LanguageModelInput which replaces the abstract # base class BaseMessage with a union of its subclasses, which makes diff --git a/libs/core/langchain_core/chat_model.py b/libs/core/langchain_core/language_models/chat_models.py similarity index 97% rename from libs/core/langchain_core/chat_model.py rename to libs/core/langchain_core/language_models/chat_models.py index ebe77711d39c4..0a1b32e444e9d 100644 --- a/libs/core/langchain_core/chat_model.py +++ b/libs/core/langchain_core/language_models/chat_models.py @@ -14,36 +14,34 @@ cast, ) -from langchain_core.callbacks.base import BaseCallbackManager -from langchain_core.callbacks.manager import ( +from langchain_core.callbacks import ( AsyncCallbackManager, AsyncCallbackManagerForLLMRun, + BaseCallbackManager, CallbackManager, CallbackManagerForLLMRun, Callbacks, ) from langchain_core.globals import get_llm_cache -from langchain_core.load.dump import dumpd, dumps -from langchain_core.prompts.base import StringPromptValue -from langchain_core.prompts.chat import ChatPromptValue -from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.runnables import RunnableConfig -from langchain_core.schema import ( - ChatGeneration, - ChatResult, - LLMResult, - PromptValue, - RunInfo, -) -from langchain_core.schema.language_model import BaseLanguageModel, LanguageModelInput -from langchain_core.schema.messages import ( +from langchain_core.language_models.base import 
BaseLanguageModel, LanguageModelInput +from langchain_core.load import dumpd, dumps +from langchain_core.messages import ( AIMessage, AnyMessage, BaseMessage, BaseMessageChunk, HumanMessage, ) -from langchain_core.schema.output import ChatGenerationChunk +from langchain_core.outputs import ( + ChatGeneration, + ChatGenerationChunk, + ChatResult, + LLMResult, + RunInfo, +) +from langchain_core.prompts import ChatPromptValue, PromptValue, StringPromptValue +from langchain_core.pydantic_v1 import Field, root_validator +from langchain_core.runnables import RunnableConfig def _get_verbosity() -> bool: diff --git a/libs/core/langchain_core/llm.py b/libs/core/langchain_core/language_models/llms.py similarity index 98% rename from libs/core/langchain_core/llm.py rename to libs/core/langchain_core/language_models/llms.py index b48f9230a8800..eefa1d0517323 100644 --- a/libs/core/langchain_core/llm.py +++ b/libs/core/langchain_core/language_models/llms.py @@ -46,16 +46,13 @@ Callbacks, ) from langchain_core.globals import get_llm_cache -from langchain_core.load.dump import dumpd -from langchain_core.prompts.base import StringPromptValue -from langchain_core.prompts.chat import ChatPromptValue +from langchain_core.language_models.base import BaseLanguageModel, LanguageModelInput +from langchain_core.load import dumpd +from langchain_core.messages import AIMessage, BaseMessage, get_buffer_string +from langchain_core.outputs import Generation, GenerationChunk, LLMResult, RunInfo +from langchain_core.prompts import ChatPromptValue, PromptValue, StringPromptValue from langchain_core.pydantic_v1 import Field, root_validator, validator -from langchain_core.runnables import RunnableConfig -from langchain_core.runnables.config import get_config_list -from langchain_core.schema import Generation, LLMResult, PromptValue, RunInfo -from langchain_core.schema.language_model import BaseLanguageModel, LanguageModelInput -from langchain_core.schema.messages import AIMessage, BaseMessage, get_buffer_string -from langchain_core.schema.output import GenerationChunk +from langchain_core.runnables import RunnableConfig, get_config_list logger = logging.getLogger(__name__) diff --git a/libs/core/langchain_core/schema/memory.py b/libs/core/langchain_core/memory.py similarity index 100% rename from libs/core/langchain_core/schema/memory.py rename to libs/core/langchain_core/memory.py diff --git a/libs/core/langchain_core/messages/__init__.py b/libs/core/langchain_core/messages/__init__.py new file mode 100644 index 0000000000000..cb20a40eeb434 --- /dev/null +++ b/libs/core/langchain_core/messages/__init__.py @@ -0,0 +1,120 @@ +from typing import List, Sequence, Union + +from langchain_core.messages.ai import AIMessage, AIMessageChunk +from langchain_core.messages.base import ( + BaseMessage, + BaseMessageChunk, + merge_content, + message_to_dict, + messages_to_dict, +) +from langchain_core.messages.chat import ChatMessage, ChatMessageChunk +from langchain_core.messages.function import FunctionMessage, FunctionMessageChunk +from langchain_core.messages.human import HumanMessage, HumanMessageChunk +from langchain_core.messages.system import SystemMessage, SystemMessageChunk +from langchain_core.messages.tool import ToolMessage, ToolMessageChunk + +AnyMessage = Union[ + AIMessage, HumanMessage, ChatMessage, SystemMessage, FunctionMessage, ToolMessage +] + + +def get_buffer_string( + messages: Sequence[BaseMessage], human_prefix: str = "Human", ai_prefix: str = "AI" +) -> str: + """Convert sequence of Messages to strings and 
concatenate them into one string. + + Args: + messages: Messages to be converted to strings. + human_prefix: The prefix to prepend to contents of HumanMessages. + ai_prefix: THe prefix to prepend to contents of AIMessages. + + Returns: + A single string concatenation of all input messages. + + Example: + .. code-block:: python + + from langchain_core import AIMessage, HumanMessage + + messages = [ + HumanMessage(content="Hi, how are you?"), + AIMessage(content="Good, how are you?"), + ] + get_buffer_string(messages) + # -> "Human: Hi, how are you?\nAI: Good, how are you?" + """ + string_messages = [] + for m in messages: + if isinstance(m, HumanMessage): + role = human_prefix + elif isinstance(m, AIMessage): + role = ai_prefix + elif isinstance(m, SystemMessage): + role = "System" + elif isinstance(m, FunctionMessage): + role = "Function" + elif isinstance(m, ChatMessage): + role = m.role + else: + raise ValueError(f"Got unsupported message type: {m}") + message = f"{role}: {m.content}" + if isinstance(m, AIMessage) and "function_call" in m.additional_kwargs: + message += f"{m.additional_kwargs['function_call']}" + string_messages.append(message) + + return "\n".join(string_messages) + + +def _message_from_dict(message: dict) -> BaseMessage: + _type = message["type"] + if _type == "human": + return HumanMessage(**message["data"]) + elif _type == "ai": + return AIMessage(**message["data"]) + elif _type == "system": + return SystemMessage(**message["data"]) + elif _type == "chat": + return ChatMessage(**message["data"]) + elif _type == "function": + return FunctionMessage(**message["data"]) + elif _type == "tool": + return ToolMessage(**message["data"]) + else: + raise ValueError(f"Got unexpected message type: {_type}") + + +def messages_from_dict(messages: Sequence[dict]) -> List[BaseMessage]: + """Convert a sequence of messages from dicts to Message objects. + + Args: + messages: Sequence of messages (as dicts) to convert. + + Returns: + List of messages (BaseMessages). + """ + return [_message_from_dict(m) for m in messages] + + +__all__ = [ + "AIMessage", + "AIMessageChunk", + "AnyMessage", + "BaseMessage", + "BaseMessageChunk", + "ChatMessage", + "ChatMessageChunk", + "FunctionMessage", + "FunctionMessageChunk", + "HumanMessage", + "HumanMessageChunk", + "SystemMessage", + "SystemMessageChunk", + "ToolMessage", + "ToolMessageChunk", + "get_buffer_string", + "messages_from_dict", + "messages_to_dict", + "message_to_dict", + "merge_content", +] diff --git a/libs/core/langchain_core/messages/ai.py b/libs/core/langchain_core/messages/ai.py new file mode 100644 index 0000000000000..66504fa74ed62 --- /dev/null +++ b/libs/core/langchain_core/messages/ai.py @@ -0,0 +1,47 @@ +from typing import Any, Literal + +from langchain_core.messages.base import ( + BaseMessage, + BaseMessageChunk, + merge_content, +) + + +class AIMessage(BaseMessage): + """A Message from an AI.""" + + example: bool = False + """Whether this Message is being passed in to the model as part of an example + conversation. + """ + + type: Literal["ai"] = "ai" + + +AIMessage.update_forward_refs() + + +class AIMessageChunk(AIMessage, BaseMessageChunk): + """A Message chunk from an AI.""" + + # Ignoring mypy re-assignment here since we're overriding the value + # to make sure that the chunk variant can be discriminated from the + # non-chunk variant. 
+ type: Literal["AIMessageChunk"] = "AIMessageChunk" # type: ignore[assignment] # noqa: E501 + + def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore + if isinstance(other, AIMessageChunk): + if self.example != other.example: + raise ValueError( + "Cannot concatenate AIMessageChunks with different example values." + ) + + return self.__class__( + example=self.example, + content=merge_content(self.content, other.content), + additional_kwargs=self._merge_kwargs_dict( + self.additional_kwargs, other.additional_kwargs + ), + ) + + return super().__add__(other) diff --git a/libs/core/langchain_core/messages/base.py b/libs/core/langchain_core/messages/base.py new file mode 100644 index 0000000000000..c790972725794 --- /dev/null +++ b/libs/core/langchain_core/messages/base.py @@ -0,0 +1,126 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, List, Sequence, Union + +from langchain_core.load.serializable import Serializable +from langchain_core.pydantic_v1 import Extra, Field + +if TYPE_CHECKING: + from langchain_core.prompts.chat import ChatPromptTemplate + + +class BaseMessage(Serializable): + """The base abstract Message class. + + Messages are the inputs and outputs of ChatModels. + """ + + content: Union[str, List[Union[str, Dict]]] + """The string contents of the message.""" + + additional_kwargs: dict = Field(default_factory=dict) + """Any additional information.""" + + type: str + + class Config: + extra = Extra.allow + + @classmethod + def is_lc_serializable(cls) -> bool: + """Return whether this class is serializable.""" + return True + + def __add__(self, other: Any) -> ChatPromptTemplate: + from langchain_core.prompts.chat import ChatPromptTemplate + + prompt = ChatPromptTemplate(messages=[self]) + return prompt + other + + +def merge_content( + first_content: Union[str, List[Union[str, Dict]]], + second_content: Union[str, List[Union[str, Dict]]], +) -> Union[str, List[Union[str, Dict]]]: + # If first chunk is a string + if isinstance(first_content, str): + # If the second chunk is also a string, then merge them naively + if isinstance(second_content, str): + return first_content + second_content + # If the second chunk is a list, add the first chunk to the start of the list + else: + return_list: List[Union[str, Dict]] = [first_content] + return return_list + second_content + # If both are lists, merge them naively + elif isinstance(second_content, List): + return first_content + second_content + # If the first content is a list, and the second content is a string + else: + # If the last element of the first content is a string + # Add the second content to the last element + if isinstance(first_content[-1], str): + return first_content[:-1] + [first_content[-1] + second_content] + else: + # Otherwise, add the second content as a new element of the list + return first_content + [second_content] + + +class BaseMessageChunk(BaseMessage): + """A Message chunk, which can be concatenated with other Message chunks.""" + + def _merge_kwargs_dict( + self, left: Dict[str, Any], right: Dict[str, Any] + ) -> Dict[str, Any]: + """Merge additional_kwargs from another BaseMessageChunk into this one.""" + merged = left.copy() + for k, v in right.items(): + if k not in merged: + merged[k] = v + elif type(merged[k]) != type(v): + raise ValueError( + f'additional_kwargs["{k}"] already exists in this message,' + " but with a different type." 
+ ) + elif isinstance(merged[k], str): + merged[k] += v + elif isinstance(merged[k], dict): + merged[k] = self._merge_kwargs_dict(merged[k], v) + else: + raise ValueError( + f"Additional kwargs key {k} already exists in this message." + ) + return merged + + def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore + if isinstance(other, BaseMessageChunk): + # If both are (subclasses of) BaseMessageChunk, + # concat into a single BaseMessageChunk + + return self.__class__( + content=merge_content(self.content, other.content), + additional_kwargs=self._merge_kwargs_dict( + self.additional_kwargs, other.additional_kwargs + ), + ) + else: + raise TypeError( + 'unsupported operand type(s) for +: "' + f"{self.__class__.__name__}" + f'" and "{other.__class__.__name__}"' + ) + + +def message_to_dict(message: BaseMessage) -> dict: + return {"type": message.type, "data": message.dict()} + + +def messages_to_dict(messages: Sequence[BaseMessage]) -> List[dict]: + """Convert a sequence of Messages to a list of dictionaries. + + Args: + messages: Sequence of messages (as BaseMessages) to convert. + + Returns: + List of messages as dicts. + """ + return [message_to_dict(m) for m in messages] diff --git a/libs/core/langchain_core/messages/chat.py b/libs/core/langchain_core/messages/chat.py new file mode 100644 index 0000000000000..173453f30ca54 --- /dev/null +++ b/libs/core/langchain_core/messages/chat.py @@ -0,0 +1,53 @@ +from typing import Any, Literal + +from langchain_core.messages.base import ( + BaseMessage, + BaseMessageChunk, + merge_content, +) + + +class ChatMessage(BaseMessage): + """A Message that can be assigned an arbitrary speaker (i.e. role).""" + + role: str + """The speaker / role of the Message.""" + + type: Literal["chat"] = "chat" + + +ChatMessage.update_forward_refs() + + +class ChatMessageChunk(ChatMessage, BaseMessageChunk): + """A Chat Message chunk.""" + + # Ignoring mypy re-assignment here since we're overriding the value + # to make sure that the chunk variant can be discriminated from the + # non-chunk variant. + type: Literal["ChatMessageChunk"] = "ChatMessageChunk" # type: ignore + + def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore + if isinstance(other, ChatMessageChunk): + if self.role != other.role: + raise ValueError( + "Cannot concatenate ChatMessageChunks with different roles." 
+ ) + + return self.__class__( + role=self.role, + content=merge_content(self.content, other.content), + additional_kwargs=self._merge_kwargs_dict( + self.additional_kwargs, other.additional_kwargs + ), + ) + elif isinstance(other, BaseMessageChunk): + return self.__class__( + role=self.role, + content=merge_content(self.content, other.content), + additional_kwargs=self._merge_kwargs_dict( + self.additional_kwargs, other.additional_kwargs + ), + ) + else: + return super().__add__(other) diff --git a/libs/core/langchain_core/messages/function.py b/libs/core/langchain_core/messages/function.py new file mode 100644 index 0000000000000..a2e8f5bb8202e --- /dev/null +++ b/libs/core/langchain_core/messages/function.py @@ -0,0 +1,45 @@ +from typing import Any, Literal + +from langchain_core.messages.base import ( + BaseMessage, + BaseMessageChunk, + merge_content, +) + + +class FunctionMessage(BaseMessage): + """A Message for passing the result of executing a function back to a model.""" + + name: str + """The name of the function that was executed.""" + + type: Literal["function"] = "function" + + +FunctionMessage.update_forward_refs() + + +class FunctionMessageChunk(FunctionMessage, BaseMessageChunk): + """A Function Message chunk.""" + + # Ignoring mypy re-assignment here since we're overriding the value + # to make sure that the chunk variant can be discriminated from the + # non-chunk variant. + type: Literal["FunctionMessageChunk"] = "FunctionMessageChunk" # type: ignore[assignment] + + def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore + if isinstance(other, FunctionMessageChunk): + if self.name != other.name: + raise ValueError( + "Cannot concatenate FunctionMessageChunks with different names." + ) + + return self.__class__( + name=self.name, + content=merge_content(self.content, other.content), + additional_kwargs=self._merge_kwargs_dict( + self.additional_kwargs, other.additional_kwargs + ), + ) + + return super().__add__(other) diff --git a/libs/core/langchain_core/messages/human.py b/libs/core/langchain_core/messages/human.py new file mode 100644 index 0000000000000..90b3fcee89b77 --- /dev/null +++ b/libs/core/langchain_core/messages/human.py @@ -0,0 +1,26 @@ +from typing import Literal + +from langchain_core.messages.base import BaseMessage, BaseMessageChunk + + +class HumanMessage(BaseMessage): + """A Message from a human.""" + + example: bool = False + """Whether this Message is being passed in to the model as part of an example + conversation. + """ + + type: Literal["human"] = "human" + + +HumanMessage.update_forward_refs() + + +class HumanMessageChunk(HumanMessage, BaseMessageChunk): + """A Human Message chunk.""" + + # Ignoring mypy re-assignment here since we're overriding the value + # to make sure that the chunk variant can be discriminated from the + # non-chunk variant. + type: Literal["HumanMessageChunk"] = "HumanMessageChunk" # type: ignore[assignment] # noqa: E501 diff --git a/libs/core/langchain_core/messages/system.py b/libs/core/langchain_core/messages/system.py new file mode 100644 index 0000000000000..0f3f0879e21d6 --- /dev/null +++ b/libs/core/langchain_core/messages/system.py @@ -0,0 +1,23 @@ +from typing import Literal + +from langchain_core.messages.base import BaseMessage, BaseMessageChunk + + +class SystemMessage(BaseMessage): + """A Message for priming AI behavior, usually passed in as the first of a sequence + of input messages. 
+ """ + + type: Literal["system"] = "system" + + +SystemMessage.update_forward_refs() + + +class SystemMessageChunk(SystemMessage, BaseMessageChunk): + """A System Message chunk.""" + + # Ignoring mypy re-assignment here since we're overriding the value + # to make sure that the chunk variant can be discriminated from the + # non-chunk variant. + type: Literal["SystemMessageChunk"] = "SystemMessageChunk" # type: ignore[assignment] # noqa: E501 diff --git a/libs/core/langchain_core/messages/tool.py b/libs/core/langchain_core/messages/tool.py new file mode 100644 index 0000000000000..3c1a19532ba64 --- /dev/null +++ b/libs/core/langchain_core/messages/tool.py @@ -0,0 +1,45 @@ +from typing import Any, Literal + +from langchain_core.messages.base import ( + BaseMessage, + BaseMessageChunk, + merge_content, +) + + +class ToolMessage(BaseMessage): + """A Message for passing the result of executing a tool back to a model.""" + + tool_call_id: str + """Tool call that this message is responding to.""" + + type: Literal["tool"] = "tool" + + +ToolMessage.update_forward_refs() + + +class ToolMessageChunk(ToolMessage, BaseMessageChunk): + """A Tool Message chunk.""" + + # Ignoring mypy re-assignment here since we're overriding the value + # to make sure that the chunk variant can be discriminated from the + # non-chunk variant. + type: Literal["ToolMessageChunk"] = "ToolMessageChunk" # type: ignore[assignment] + + def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore + if isinstance(other, ToolMessageChunk): + if self.tool_call_id != other.tool_call_id: + raise ValueError( + "Cannot concatenate ToolMessageChunks with different names." + ) + + return self.__class__( + tool_call_id=self.tool_call_id, + content=merge_content(self.content, other.content), + additional_kwargs=self._merge_kwargs_dict( + self.additional_kwargs, other.additional_kwargs + ), + ) + + return super().__add__(other) diff --git a/libs/core/langchain_core/output_parsers/__init__.py b/libs/core/langchain_core/output_parsers/__init__.py index e69de29bb2d1d..72039737be809 100644 --- a/libs/core/langchain_core/output_parsers/__init__.py +++ b/libs/core/langchain_core/output_parsers/__init__.py @@ -0,0 +1,29 @@ +from langchain_core.output_parsers.base import ( + BaseGenerationOutputParser, + BaseLLMOutputParser, + BaseOutputParser, +) +from langchain_core.output_parsers.list import ( + CommaSeparatedListOutputParser, + ListOutputParser, + MarkdownListOutputParser, + NumberedListOutputParser, +) +from langchain_core.output_parsers.str import StrOutputParser +from langchain_core.output_parsers.transform import ( + BaseCumulativeTransformOutputParser, + BaseTransformOutputParser, +) + +__all__ = [ + "BaseLLMOutputParser", + "BaseGenerationOutputParser", + "BaseOutputParser", + "ListOutputParser", + "CommaSeparatedListOutputParser", + "NumberedListOutputParser", + "MarkdownListOutputParser", + "StrOutputParser", + "BaseTransformOutputParser", + "BaseCumulativeTransformOutputParser", +] diff --git a/libs/core/langchain_core/schema/output_parser.py b/libs/core/langchain_core/output_parsers/base.py similarity index 60% rename from libs/core/langchain_core/schema/output_parser.py rename to libs/core/langchain_core/output_parsers/base.py index 5dd2ebfb2ace2..ff21255a84c7f 100644 --- a/libs/core/langchain_core/schema/output_parser.py +++ b/libs/core/langchain_core/output_parsers/base.py @@ -5,10 +5,8 @@ from abc import ABC, abstractmethod from typing import ( Any, - AsyncIterator, Dict, Generic, - Iterator, List, Optional, Type, @@ 
-18,15 +16,13 @@ from typing_extensions import get_args -from langchain_core.runnables import RunnableConfig, RunnableSerializable -from langchain_core.schema.messages import AnyMessage, BaseMessage, BaseMessageChunk -from langchain_core.schema.output import ( +from langchain_core.messages import AnyMessage, BaseMessage +from langchain_core.outputs import ( ChatGeneration, - ChatGenerationChunk, Generation, - GenerationChunk, ) -from langchain_core.schema.prompt import PromptValue +from langchain_core.prompts.value import PromptValue +from langchain_core.runnables import RunnableConfig, RunnableSerializable T = TypeVar("T") @@ -303,173 +299,3 @@ def dict(self, **kwargs: Any) -> Dict: except NotImplementedError: pass return output_parser_dict - - -class BaseTransformOutputParser(BaseOutputParser[T]): - """Base class for an output parser that can handle streaming input.""" - - def _transform(self, input: Iterator[Union[str, BaseMessage]]) -> Iterator[T]: - for chunk in input: - if isinstance(chunk, BaseMessage): - yield self.parse_result([ChatGeneration(message=chunk)]) - else: - yield self.parse_result([Generation(text=chunk)]) - - async def _atransform( - self, input: AsyncIterator[Union[str, BaseMessage]] - ) -> AsyncIterator[T]: - async for chunk in input: - if isinstance(chunk, BaseMessage): - yield self.parse_result([ChatGeneration(message=chunk)]) - else: - yield self.parse_result([Generation(text=chunk)]) - - def transform( - self, - input: Iterator[Union[str, BaseMessage]], - config: Optional[RunnableConfig] = None, - **kwargs: Any, - ) -> Iterator[T]: - yield from self._transform_stream_with_config( - input, self._transform, config, run_type="parser" - ) - - async def atransform( - self, - input: AsyncIterator[Union[str, BaseMessage]], - config: Optional[RunnableConfig] = None, - **kwargs: Any, - ) -> AsyncIterator[T]: - async for chunk in self._atransform_stream_with_config( - input, self._atransform, config, run_type="parser" - ): - yield chunk - - -class BaseCumulativeTransformOutputParser(BaseTransformOutputParser[T]): - """Base class for an output parser that can handle streaming input.""" - - diff: bool = False - """In streaming mode, whether to yield diffs between the previous and current - parsed output, or just the current parsed output. - """ - - def _diff(self, prev: Optional[T], next: T) -> T: - """Convert parsed outputs into a diff format. 
The semantics of this are - up to the output parser.""" - raise NotImplementedError() - - def _transform(self, input: Iterator[Union[str, BaseMessage]]) -> Iterator[Any]: - prev_parsed = None - acc_gen = None - for chunk in input: - if isinstance(chunk, BaseMessageChunk): - chunk_gen: Generation = ChatGenerationChunk(message=chunk) - elif isinstance(chunk, BaseMessage): - chunk_gen = ChatGenerationChunk( - message=BaseMessageChunk(**chunk.dict()) - ) - else: - chunk_gen = GenerationChunk(text=chunk) - - if acc_gen is None: - acc_gen = chunk_gen - else: - acc_gen += chunk_gen - - parsed = self.parse_result([acc_gen], partial=True) - if parsed is not None and parsed != prev_parsed: - if self.diff: - yield self._diff(prev_parsed, parsed) - else: - yield parsed - prev_parsed = parsed - - async def _atransform( - self, input: AsyncIterator[Union[str, BaseMessage]] - ) -> AsyncIterator[T]: - prev_parsed = None - acc_gen = None - async for chunk in input: - if isinstance(chunk, BaseMessageChunk): - chunk_gen: Generation = ChatGenerationChunk(message=chunk) - elif isinstance(chunk, BaseMessage): - chunk_gen = ChatGenerationChunk( - message=BaseMessageChunk(**chunk.dict()) - ) - else: - chunk_gen = GenerationChunk(text=chunk) - - if acc_gen is None: - acc_gen = chunk_gen - else: - acc_gen += chunk_gen - - parsed = self.parse_result([acc_gen], partial=True) - if parsed is not None and parsed != prev_parsed: - if self.diff: - yield self._diff(prev_parsed, parsed) - else: - yield parsed - prev_parsed = parsed - - -class StrOutputParser(BaseTransformOutputParser[str]): - """OutputParser that parses LLMResult into the top likely string.""" - - @classmethod - def is_lc_serializable(cls) -> bool: - """Return whether this class is serializable.""" - return True - - @property - def _type(self) -> str: - """Return the output parser type for serialization.""" - return "default" - - def parse(self, text: str) -> str: - """Returns the input text with no changes.""" - return text - - -# TODO: Deprecate -NoOpOutputParser = StrOutputParser - - -class OutputParserException(ValueError): - """Exception that output parsers should raise to signify a parsing error. - - This exists to differentiate parsing errors from other code or execution errors - that also may arise inside the output parser. OutputParserExceptions will be - available to catch and handle in ways to fix the parsing error, while other - errors will be raised. - - Args: - error: The error that's being re-raised or an error message. - observation: String explanation of error which can be passed to a - model to try and remediate the issue. - llm_output: String model output which is error-ing. - send_to_llm: Whether to send the observation and llm_output back to an Agent - after an OutputParserException has been raised. This gives the underlying - model driving the agent the context that the previous output was improperly - structured, in the hopes that it will update the output to the correct - format. 
- """ - - def __init__( - self, - error: Any, - observation: Optional[str] = None, - llm_output: Optional[str] = None, - send_to_llm: bool = False, - ): - super(OutputParserException, self).__init__(error) - if send_to_llm: - if observation is None or llm_output is None: - raise ValueError( - "Arguments 'observation' & 'llm_output'" - " are required if 'send_to_llm' is True" - ) - self.observation = observation - self.llm_output = llm_output - self.send_to_llm = send_to_llm diff --git a/libs/core/langchain_core/output_parsers/list.py b/libs/core/langchain_core/output_parsers/list.py index 079d204a4e0fb..9710ce82b946f 100644 --- a/libs/core/langchain_core/output_parsers/list.py +++ b/libs/core/langchain_core/output_parsers/list.py @@ -4,7 +4,7 @@ from abc import abstractmethod from typing import List -from langchain_core.schema import BaseOutputParser +from langchain_core.output_parsers.base import BaseOutputParser class ListOutputParser(BaseOutputParser[List[str]]): diff --git a/libs/core/langchain_core/output_parsers/str.py b/libs/core/langchain_core/output_parsers/str.py new file mode 100644 index 0000000000000..704b21472a08d --- /dev/null +++ b/libs/core/langchain_core/output_parsers/str.py @@ -0,0 +1,19 @@ +from langchain_core.output_parsers.transform import BaseTransformOutputParser + + +class StrOutputParser(BaseTransformOutputParser[str]): + """OutputParser that parses LLMResult into the top likely string.""" + + @classmethod + def is_lc_serializable(cls) -> bool: + """Return whether this class is serializable.""" + return True + + @property + def _type(self) -> str: + """Return the output parser type for serialization.""" + return "default" + + def parse(self, text: str) -> str: + """Returns the input text with no changes.""" + return text diff --git a/libs/core/langchain_core/output_parsers/transform.py b/libs/core/langchain_core/output_parsers/transform.py new file mode 100644 index 0000000000000..25b45ac776553 --- /dev/null +++ b/libs/core/langchain_core/output_parsers/transform.py @@ -0,0 +1,128 @@ +from __future__ import annotations + +from typing import ( + Any, + AsyncIterator, + Iterator, + Optional, + Union, +) + +from langchain_core.messages import BaseMessage, BaseMessageChunk +from langchain_core.output_parsers.base import BaseOutputParser, T +from langchain_core.outputs import ( + ChatGeneration, + ChatGenerationChunk, + Generation, + GenerationChunk, +) +from langchain_core.runnables import RunnableConfig + + +class BaseTransformOutputParser(BaseOutputParser[T]): + """Base class for an output parser that can handle streaming input.""" + + def _transform(self, input: Iterator[Union[str, BaseMessage]]) -> Iterator[T]: + for chunk in input: + if isinstance(chunk, BaseMessage): + yield self.parse_result([ChatGeneration(message=chunk)]) + else: + yield self.parse_result([Generation(text=chunk)]) + + async def _atransform( + self, input: AsyncIterator[Union[str, BaseMessage]] + ) -> AsyncIterator[T]: + async for chunk in input: + if isinstance(chunk, BaseMessage): + yield self.parse_result([ChatGeneration(message=chunk)]) + else: + yield self.parse_result([Generation(text=chunk)]) + + def transform( + self, + input: Iterator[Union[str, BaseMessage]], + config: Optional[RunnableConfig] = None, + **kwargs: Any, + ) -> Iterator[T]: + yield from self._transform_stream_with_config( + input, self._transform, config, run_type="parser" + ) + + async def atransform( + self, + input: AsyncIterator[Union[str, BaseMessage]], + config: Optional[RunnableConfig] = None, + **kwargs: Any, + 
) -> AsyncIterator[T]: + async for chunk in self._atransform_stream_with_config( + input, self._atransform, config, run_type="parser" + ): + yield chunk + + +class BaseCumulativeTransformOutputParser(BaseTransformOutputParser[T]): + """Base class for an output parser that can handle streaming input.""" + + diff: bool = False + """In streaming mode, whether to yield diffs between the previous and current + parsed output, or just the current parsed output. + """ + + def _diff(self, prev: Optional[T], next: T) -> T: + """Convert parsed outputs into a diff format. The semantics of this are + up to the output parser.""" + raise NotImplementedError() + + def _transform(self, input: Iterator[Union[str, BaseMessage]]) -> Iterator[Any]: + prev_parsed = None + acc_gen = None + for chunk in input: + if isinstance(chunk, BaseMessageChunk): + chunk_gen: Generation = ChatGenerationChunk(message=chunk) + elif isinstance(chunk, BaseMessage): + chunk_gen = ChatGenerationChunk( + message=BaseMessageChunk(**chunk.dict()) + ) + else: + chunk_gen = GenerationChunk(text=chunk) + + if acc_gen is None: + acc_gen = chunk_gen + else: + acc_gen += chunk_gen + + parsed = self.parse_result([acc_gen], partial=True) + if parsed is not None and parsed != prev_parsed: + if self.diff: + yield self._diff(prev_parsed, parsed) + else: + yield parsed + prev_parsed = parsed + + async def _atransform( + self, input: AsyncIterator[Union[str, BaseMessage]] + ) -> AsyncIterator[T]: + prev_parsed = None + acc_gen = None + async for chunk in input: + if isinstance(chunk, BaseMessageChunk): + chunk_gen: Generation = ChatGenerationChunk(message=chunk) + elif isinstance(chunk, BaseMessage): + chunk_gen = ChatGenerationChunk( + message=BaseMessageChunk(**chunk.dict()) + ) + else: + chunk_gen = GenerationChunk(text=chunk) + + if acc_gen is None: + acc_gen = chunk_gen + else: + acc_gen += chunk_gen + + parsed = self.parse_result([acc_gen], partial=True) + if parsed is not None and parsed != prev_parsed: + if self.diff: + yield self._diff(prev_parsed, parsed) + else: + yield parsed + prev_parsed = parsed diff --git a/libs/core/langchain_core/outputs/__init__.py b/libs/core/langchain_core/outputs/__init__.py new file mode 100644 index 0000000000000..18ee2b816eb6c --- /dev/null +++ b/libs/core/langchain_core/outputs/__init__.py @@ -0,0 +1,15 @@ +from langchain_core.outputs.chat_generation import ChatGeneration, ChatGenerationChunk +from langchain_core.outputs.chat_result import ChatResult +from langchain_core.outputs.generation import Generation, GenerationChunk +from langchain_core.outputs.llm_result import LLMResult +from langchain_core.outputs.run_info import RunInfo + +__all__ = [ + "ChatGeneration", + "ChatGenerationChunk", + "ChatResult", + "Generation", + "GenerationChunk", + "LLMResult", + "RunInfo", +] diff --git a/libs/core/langchain_core/outputs/chat_generation.py b/libs/core/langchain_core/outputs/chat_generation.py new file mode 100644 index 0000000000000..a604996410cfb --- /dev/null +++ b/libs/core/langchain_core/outputs/chat_generation.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +from typing import Any, Dict, Literal + +from langchain_core.messages import BaseMessage, BaseMessageChunk +from langchain_core.outputs.generation import Generation +from langchain_core.pydantic_v1 import root_validator + + +class ChatGeneration(Generation): + """A single chat generation output.""" + + text: str = "" + """*SHOULD NOT BE SET DIRECTLY* The text contents of the output message.""" + message: BaseMessage + """The message 
output by the chat model.""" + # Override type to be ChatGeneration, ignore mypy error as this is intentional + type: Literal["ChatGeneration"] = "ChatGeneration" # type: ignore[assignment] + """Type is used exclusively for serialization purposes.""" + + @root_validator + def set_text(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Set the text attribute to be the contents of the message.""" + try: + values["text"] = values["message"].content + except (KeyError, AttributeError) as e: + raise ValueError("Error while initializing ChatGeneration") from e + return values + + +class ChatGenerationChunk(ChatGeneration): + """A ChatGeneration chunk, which can be concatenated with other + ChatGeneration chunks. + + Attributes: + message: The message chunk output by the chat model. + """ + + message: BaseMessageChunk + # Override type to be ChatGeneration, ignore mypy error as this is intentional + type: Literal["ChatGenerationChunk"] = "ChatGenerationChunk" # type: ignore[assignment] # noqa: E501 + """Type is used exclusively for serialization purposes.""" + + def __add__(self, other: ChatGenerationChunk) -> ChatGenerationChunk: + if isinstance(other, ChatGenerationChunk): + generation_info = ( + {**(self.generation_info or {}), **(other.generation_info or {})} + if self.generation_info is not None or other.generation_info is not None + else None + ) + return ChatGenerationChunk( + message=self.message + other.message, + generation_info=generation_info, + ) + else: + raise TypeError( + f"unsupported operand type(s) for +: '{type(self)}' and '{type(other)}'" + ) diff --git a/libs/core/langchain_core/outputs/chat_result.py b/libs/core/langchain_core/outputs/chat_result.py new file mode 100644 index 0000000000000..f985ca82753ee --- /dev/null +++ b/libs/core/langchain_core/outputs/chat_result.py @@ -0,0 +1,15 @@ +from typing import List, Optional + +from langchain_core.outputs.chat_generation import ChatGeneration +from langchain_core.pydantic_v1 import BaseModel + + +class ChatResult(BaseModel): + """Class that contains all results for a single chat model call.""" + + generations: List[ChatGeneration] + """List of the chat generations. This is a List because an input can have multiple + candidate generations. + """ + llm_output: Optional[dict] = None + """For arbitrary LLM provider specific output.""" diff --git a/libs/core/langchain_core/outputs/generation.py b/libs/core/langchain_core/outputs/generation.py new file mode 100644 index 0000000000000..67446acca457f --- /dev/null +++ b/libs/core/langchain_core/outputs/generation.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +from typing import Any, Dict, Literal, Optional + +from langchain_core.load import Serializable + + +class Generation(Serializable): + """A single text generation output.""" + + text: str + """Generated text output.""" + + generation_info: Optional[Dict[str, Any]] = None + """Raw response from the provider. May include things like the + reason for finishing or token log probabilities. 
+ """ + type: Literal["Generation"] = "Generation" + """Type is used exclusively for serialization purposes.""" + # TODO: add log probs as separate attribute + + @classmethod + def is_lc_serializable(cls) -> bool: + """Return whether this class is serializable.""" + return True + + +class GenerationChunk(Generation): + """A Generation chunk, which can be concatenated with other Generation chunks.""" + + def __add__(self, other: GenerationChunk) -> GenerationChunk: + if isinstance(other, GenerationChunk): + generation_info = ( + {**(self.generation_info or {}), **(other.generation_info or {})} + if self.generation_info is not None or other.generation_info is not None + else None + ) + return GenerationChunk( + text=self.text + other.text, + generation_info=generation_info, + ) + else: + raise TypeError( + f"unsupported operand type(s) for +: '{type(self)}' and '{type(other)}'" + ) diff --git a/libs/core/langchain_core/outputs/llm_result.py b/libs/core/langchain_core/outputs/llm_result.py new file mode 100644 index 0000000000000..d1df4c8304601 --- /dev/null +++ b/libs/core/langchain_core/outputs/llm_result.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +from copy import deepcopy +from typing import List, Optional + +from langchain_core.outputs.generation import Generation +from langchain_core.outputs.run_info import RunInfo +from langchain_core.pydantic_v1 import BaseModel + + +class LLMResult(BaseModel): + """Class that contains all results for a batched LLM call.""" + + generations: List[List[Generation]] + """List of generated outputs. This is a List[List[]] because + each input could have multiple candidate generations.""" + llm_output: Optional[dict] = None + """Arbitrary LLM provider-specific output.""" + run: Optional[List[RunInfo]] = None + """List of metadata info for model call for each input.""" + + def flatten(self) -> List[LLMResult]: + """Flatten generations into a single list. + + Unpack List[List[Generation]] -> List[LLMResult] where each returned LLMResult + contains only a single Generation. If token usage information is available, + it is kept only for the LLMResult corresponding to the top-choice + Generation, to avoid over-counting of token usage downstream. + + Returns: + List of LLMResults where each returned LLMResult contains a single + Generation. 
+ """ + llm_results = [] + for i, gen_list in enumerate(self.generations): + # Avoid double counting tokens in OpenAICallback + if i == 0: + llm_results.append( + LLMResult( + generations=[gen_list], + llm_output=self.llm_output, + ) + ) + else: + if self.llm_output is not None: + llm_output = deepcopy(self.llm_output) + llm_output["token_usage"] = dict() + else: + llm_output = None + llm_results.append( + LLMResult( + generations=[gen_list], + llm_output=llm_output, + ) + ) + return llm_results + + def __eq__(self, other: object) -> bool: + """Check for LLMResult equality by ignoring any metadata related to runs.""" + if not isinstance(other, LLMResult): + return NotImplemented + return ( + self.generations == other.generations + and self.llm_output == other.llm_output + ) diff --git a/libs/core/langchain_core/outputs/run_info.py b/libs/core/langchain_core/outputs/run_info.py new file mode 100644 index 0000000000000..0c6c69826bd5b --- /dev/null +++ b/libs/core/langchain_core/outputs/run_info.py @@ -0,0 +1,12 @@ +from __future__ import annotations + +from uuid import UUID + +from langchain_core.pydantic_v1 import BaseModel + + +class RunInfo(BaseModel): + """Class that contains metadata for a single execution of a Chain or model.""" + + run_id: UUID + """A unique identifier for the model or chain run.""" diff --git a/libs/core/langchain_core/prompts/__init__.py b/libs/core/langchain_core/prompts/__init__.py index 606255c067de4..d0ac34a7fac19 100644 --- a/libs/core/langchain_core/prompts/__init__.py +++ b/libs/core/langchain_core/prompts/__init__.py @@ -27,21 +27,18 @@ ChatPromptValue """ # noqa: E501 -from langchain_core.prompts.base import StringPromptTemplate +from langchain_core.prompts.base import BasePromptTemplate, format_document from langchain_core.prompts.chat import ( AIMessagePromptTemplate, BaseChatPromptTemplate, ChatMessagePromptTemplate, ChatPromptTemplate, + ChatPromptValue, + ChatPromptValueConcrete, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate, ) -from langchain_core.prompts.example_selector import ( - LengthBasedExampleSelector, - MaxMarginalRelevanceExampleSelector, - SemanticSimilarityExampleSelector, -) from langchain_core.prompts.few_shot import ( FewShotChatMessagePromptTemplate, FewShotPromptTemplate, @@ -50,7 +47,7 @@ from langchain_core.prompts.loading import load_prompt from langchain_core.prompts.pipeline import PipelinePromptTemplate from langchain_core.prompts.prompt import Prompt, PromptTemplate -from langchain_core.schema.prompt_template import BasePromptTemplate +from langchain_core.prompts.string import StringPromptTemplate, StringPromptValue __all__ = [ "AIMessagePromptTemplate", @@ -58,18 +55,22 @@ "BasePromptTemplate", "ChatMessagePromptTemplate", "ChatPromptTemplate", + "ChatPromptValue", + "ChatPromptValueConcrete", "FewShotPromptTemplate", "FewShotPromptWithTemplates", + "FewShotChatMessagePromptTemplate", "HumanMessagePromptTemplate", - "LengthBasedExampleSelector", - "MaxMarginalRelevanceExampleSelector", "MessagesPlaceholder", "PipelinePromptTemplate", "Prompt", "PromptTemplate", - "SemanticSimilarityExampleSelector", + "PromptValue", + "StringPromptValue", "StringPromptTemplate", "SystemMessagePromptTemplate", "load_prompt", - "FewShotChatMessagePromptTemplate", + "format_document", ] + +from langchain_core.prompts.value import PromptValue diff --git a/libs/core/langchain_core/prompts/base.py b/libs/core/langchain_core/prompts/base.py index 6a1cda6fedf89..1dc9955f330b6 100644 --- 
a/libs/core/langchain_core/prompts/base.py +++ b/libs/core/langchain_core/prompts/base.py @@ -1,173 +1,228 @@ -"""BasePrompt schema definition.""" from __future__ import annotations -import warnings -from abc import ABC -from string import Formatter -from typing import Any, Callable, Dict, List, Literal, Set - -from langchain_core.schema.messages import BaseMessage, HumanMessage -from langchain_core.schema.prompt import PromptValue -from langchain_core.schema.prompt_template import BasePromptTemplate -from langchain_core.utils.formatting import formatter - - -def jinja2_formatter(template: str, **kwargs: Any) -> str: - """Format a template using jinja2. - - *Security warning*: As of LangChain 0.0.329, this method uses Jinja2's - SandboxedEnvironment by default. However, this sand-boxing should - be treated as a best-effort approach rather than a guarantee of security. - Do not accept jinja2 templates from untrusted sources as they may lead - to arbitrary Python code execution. - - https://jinja.palletsprojects.com/en/3.1.x/sandbox/ - """ - try: - from jinja2.sandbox import SandboxedEnvironment - except ImportError: - raise ImportError( - "jinja2 not installed, which is needed to use the jinja2_formatter. " - "Please install it with `pip install jinja2`." - "Please be cautious when using jinja2 templates. " - "Do not expand jinja2 templates using unverified or user-controlled " - "inputs as that can result in arbitrary Python code execution." +import json +from abc import ABC, abstractmethod +from pathlib import Path +from typing import Any, Callable, Dict, List, Mapping, Optional, Type, Union + +import yaml + +from langchain_core.documents import Document +from langchain_core.output_parsers import BaseOutputParser +from langchain_core.prompts.value import PromptValue +from langchain_core.pydantic_v1 import BaseModel, Field, create_model, root_validator +from langchain_core.runnables import RunnableConfig, RunnableSerializable + + +class BasePromptTemplate(RunnableSerializable[Dict, PromptValue], ABC): + """Base class for all prompt templates, returning a prompt.""" + + input_variables: List[str] + """A list of the names of the variables the prompt template expects.""" + input_types: Dict[str, Any] = Field(default_factory=dict) + """A dictionary of the types of the variables the prompt template expects. + If not provided, all variables are assumed to be strings.""" + output_parser: Optional[BaseOutputParser] = None + """How to parse the output of calling an LLM on this formatted prompt.""" + partial_variables: Mapping[str, Union[str, Callable[[], str]]] = Field( + default_factory=dict + ) + + @classmethod + def is_lc_serializable(cls) -> bool: + """Return whether this class is serializable.""" + return True + + class Config: + """Configuration for this pydantic object.""" + + arbitrary_types_allowed = True + + @property + def OutputType(self) -> Any: + from langchain_core.prompts.chat import ChatPromptValueConcrete + from langchain_core.prompts.string import StringPromptValue + + return Union[StringPromptValue, ChatPromptValueConcrete] + + def get_input_schema( + self, config: Optional[RunnableConfig] = None + ) -> Type[BaseModel]: + # This is correct, but pydantic typings/mypy don't think so. + return create_model( # type: ignore[call-overload] + "PromptInput", + **{k: (self.input_types.get(k, str), None) for k in self.input_variables}, ) - # This uses a sandboxed environment to prevent arbitrary code execution. - # Jinja2 uses an opt-out rather than opt-in approach for sand-boxing. 
- # Please treat this sand-boxing as a best-effort approach rather than - # a guarantee of security. - # We recommend to never use jinja2 templates with untrusted inputs. - # https://jinja.palletsprojects.com/en/3.1.x/sandbox/ - # approach not a guarantee of security. - return SandboxedEnvironment().from_string(template).render(**kwargs) - + def invoke( + self, input: Dict, config: Optional[RunnableConfig] = None + ) -> PromptValue: + return self._call_with_config( + lambda inner_input: self.format_prompt( + **{key: inner_input[key] for key in self.input_variables} + ), + input, + config, + run_type="prompt", + ) -def validate_jinja2(template: str, input_variables: List[str]) -> None: - """ - Validate that the input variables are valid for the template. - Issues a warning if missing or extra variables are found. + @abstractmethod + def format_prompt(self, **kwargs: Any) -> PromptValue: + """Create Chat Messages.""" - Args: - template: The template string. - input_variables: The input variables. - """ - input_variables_set = set(input_variables) - valid_variables = _get_jinja2_variables_from_template(template) - missing_variables = valid_variables - input_variables_set - extra_variables = input_variables_set - valid_variables + @root_validator() + def validate_variable_names(cls, values: Dict) -> Dict: + """Validate variable names do not include restricted names.""" + if "stop" in values["input_variables"]: + raise ValueError( + "Cannot have an input variable named 'stop', as it is used internally," + " please rename." + ) + if "stop" in values["partial_variables"]: + raise ValueError( + "Cannot have an partial variable named 'stop', as it is used " + "internally, please rename." + ) + + overall = set(values["input_variables"]).intersection( + values["partial_variables"] + ) + if overall: + raise ValueError( + f"Found overlapping input and partial variables: {overall}" + ) + return values + + def partial(self, **kwargs: Union[str, Callable[[], str]]) -> BasePromptTemplate: + """Return a partial of the prompt template.""" + prompt_dict = self.__dict__.copy() + prompt_dict["input_variables"] = list( + set(self.input_variables).difference(kwargs) + ) + prompt_dict["partial_variables"] = {**self.partial_variables, **kwargs} + return type(self)(**prompt_dict) + + def _merge_partial_and_user_variables(self, **kwargs: Any) -> Dict[str, Any]: + # Get partial params: + partial_kwargs = { + k: v if isinstance(v, str) else v() + for k, v in self.partial_variables.items() + } + return {**partial_kwargs, **kwargs} - warning_message = "" - if missing_variables: - warning_message += f"Missing variables: {missing_variables} " + @abstractmethod + def format(self, **kwargs: Any) -> str: + """Format the prompt with the inputs. - if extra_variables: - warning_message += f"Extra variables: {extra_variables}" + Args: + kwargs: Any arguments to be passed to the prompt template. - if warning_message: - warnings.warn(warning_message.strip()) + Returns: + A formatted string. + Example: -def _get_jinja2_variables_from_template(template: str) -> Set[str]: - try: - from jinja2 import Environment, meta - except ImportError: - raise ImportError( - "jinja2 not installed, which is needed to use the jinja2_formatter. " - "Please install it with `pip install jinja2`." - ) - env = Environment() - ast = env.parse(template) - variables = meta.find_undeclared_variables(ast) - return variables + .. 
code-block:: python + prompt.format(variable1="foo") + """ -DEFAULT_FORMATTER_MAPPING: Dict[str, Callable] = { - "f-string": formatter.format, - "jinja2": jinja2_formatter, -} + @property + def _prompt_type(self) -> str: + """Return the prompt type key.""" + raise NotImplementedError -DEFAULT_VALIDATOR_MAPPING: Dict[str, Callable] = { - "f-string": formatter.validate_input_variables, - "jinja2": validate_jinja2, -} + def dict(self, **kwargs: Any) -> Dict: + """Return dictionary representation of prompt.""" + prompt_dict = super().dict(**kwargs) + try: + prompt_dict["_type"] = self._prompt_type + except NotImplementedError: + pass + return prompt_dict + def save(self, file_path: Union[Path, str]) -> None: + """Save the prompt. -def check_valid_template( - template: str, template_format: str, input_variables: List[str] -) -> None: - """Check that template string is valid. + Args: + file_path: Path to directory to save prompt to. - Args: - template: The template string. - template_format: The template format. Should be one of "f-string" or "jinja2". - input_variables: The input variables. + Example: + .. code-block:: python - Raises: - ValueError: If the template format is not supported. - """ - if template_format not in DEFAULT_FORMATTER_MAPPING: - valid_formats = list(DEFAULT_FORMATTER_MAPPING) - raise ValueError( - f"Invalid template format. Got `{template_format}`;" - f" should be one of {valid_formats}" - ) - try: - validator_func = DEFAULT_VALIDATOR_MAPPING[template_format] - validator_func(template, input_variables) - except KeyError as e: - raise ValueError( - "Invalid prompt schema; check for mismatched or missing input parameters. " - + str(e) - ) + prompt.save(file_path="path/prompt.yaml") + """ + if self.partial_variables: + raise ValueError("Cannot save prompt with partial variables.") + # Fetch dictionary to save + prompt_dict = self.dict() + if "_type" not in prompt_dict: + raise NotImplementedError(f"Prompt {self} does not support saving.") -def get_template_variables(template: str, template_format: str) -> List[str]: - """Get the variables from the template. + # Convert file to Path object. + if isinstance(file_path, str): + save_path = Path(file_path) + else: + save_path = file_path - Args: - template: The template string. - template_format: The template format. Should be one of "f-string" or "jinja2". + directory_path = save_path.parent + directory_path.mkdir(parents=True, exist_ok=True) - Returns: - The variables from the template. + if save_path.suffix == ".json": + with open(file_path, "w") as f: + json.dump(prompt_dict, f, indent=4) + elif save_path.suffix == ".yaml": + with open(file_path, "w") as f: + yaml.dump(prompt_dict, f, default_flow_style=False) + else: + raise ValueError(f"{save_path} must be json or yaml") - Raises: - ValueError: If the template format is not supported. - """ - if template_format == "jinja2": - # Get the variables for the template - input_variables = _get_jinja2_variables_from_template(template) - elif template_format == "f-string": - input_variables = { - v for _, v, _, _ in Formatter().parse(template) if v is not None - } - else: - raise ValueError(f"Unsupported template format: {template_format}") - return sorted(input_variables) +def format_document(doc: Document, prompt: BasePromptTemplate) -> str: + """Format a document into a string based on a prompt template. + First, this pulls information from the document from two sources: -class StringPromptValue(PromptValue): - """String prompt value.""" + 1. 
`page_content`: + This takes the information from the `document.page_content` + and assigns it to a variable named `page_content`. + 2. metadata: + This takes information from `document.metadata` and assigns + it to variables of the same name. - text: str - """Prompt text.""" - type: Literal["StringPromptValue"] = "StringPromptValue" + Those variables are then passed into the `prompt` to produce a formatted string. - def to_string(self) -> str: - """Return prompt as string.""" - return self.text + Args: + doc: Document, the page_content and metadata will be used to create + the final string. + prompt: BasePromptTemplate, will be used to format the page_content + and metadata into the final string. - def to_messages(self) -> List[BaseMessage]: - """Return prompt as messages.""" - return [HumanMessage(content=self.text)] + Returns: + A string of the formatted document. + Example: + .. code-block:: python -class StringPromptTemplate(BasePromptTemplate, ABC): - """String prompt that exposes the format method, returning a prompt.""" + from langchain_core.documents import Document + from langchain_core.prompts import PromptTemplate - def format_prompt(self, **kwargs: Any) -> PromptValue: - """Create Chat Messages.""" - return StringPromptValue(text=self.format(**kwargs)) + doc = Document(page_content="This is a joke", metadata={"page": "1"}) + prompt = PromptTemplate.from_template("Page {page}: {page_content}") + format_document(doc, prompt) + >>> "Page 1: This is a joke" + """ + base_info = {"page_content": doc.page_content, **doc.metadata} + missing_metadata = set(prompt.input_variables).difference(base_info) + if len(missing_metadata) > 0: + required_metadata = [ + iv for iv in prompt.input_variables if iv != "page_content" + ] + raise ValueError( + f"Document prompt requires documents to have metadata variables: " + f"{required_metadata}. Received document with missing metadata: " + f"{list(missing_metadata)}." 
+ ) + document_info = {k: base_info[k] for k in prompt.input_variables} + return prompt.format(**document_info) diff --git a/libs/core/langchain_core/prompts/chat.py b/libs/core/langchain_core/prompts/chat.py index 01e2ebde98bc9..0f88ba1226fad 100644 --- a/libs/core/langchain_core/prompts/chat.py +++ b/libs/core/langchain_core/prompts/chat.py @@ -19,15 +19,8 @@ ) from langchain_core._api import deprecated -from langchain_core.load.serializable import Serializable -from langchain_core.prompts.base import StringPromptTemplate -from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema import ( - BasePromptTemplate, - PromptValue, -) -from langchain_core.schema.messages import ( +from langchain_core.load import Serializable +from langchain_core.messages import ( AIMessage, AnyMessage, BaseMessage, @@ -36,6 +29,11 @@ SystemMessage, get_buffer_string, ) +from langchain_core.prompts.base import BasePromptTemplate +from langchain_core.prompts.prompt import PromptTemplate +from langchain_core.prompts.string import StringPromptTemplate +from langchain_core.prompts.value import PromptValue +from langchain_core.pydantic_v1 import Field, root_validator class BaseMessagePromptTemplate(Serializable, ABC): diff --git a/libs/core/langchain_core/prompts/few_shot.py b/libs/core/langchain_core/prompts/few_shot.py index b53c0a7ec5624..79473a89963b9 100644 --- a/libs/core/langchain_core/prompts/few_shot.py +++ b/libs/core/langchain_core/prompts/few_shot.py @@ -4,20 +4,19 @@ from pathlib import Path from typing import Any, Dict, List, Literal, Optional, Union -from langchain_core.prompts.base import ( - DEFAULT_FORMATTER_MAPPING, - StringPromptTemplate, - check_valid_template, - get_template_variables, -) +from langchain_core.messages import BaseMessage, get_buffer_string from langchain_core.prompts.chat import ( BaseChatPromptTemplate, BaseMessagePromptTemplate, ) -from langchain_core.prompts.example_selector.base import BaseExampleSelector from langchain_core.prompts.prompt import PromptTemplate +from langchain_core.prompts.string import ( + DEFAULT_FORMATTER_MAPPING, + StringPromptTemplate, + check_valid_template, + get_template_variables, +) from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator -from langchain_core.schema.messages import BaseMessage, get_buffer_string class _FewShotPromptTemplateMixin(BaseModel): @@ -27,7 +26,7 @@ class _FewShotPromptTemplateMixin(BaseModel): """Examples to format into the prompt. Either this or example_selector should be provided.""" - example_selector: Optional[BaseExampleSelector] = None + example_selector: Any = None """ExampleSelector to choose the examples to format into the prompt. 
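# A minimal usage sketch of the prompt API assembled above (BasePromptTemplate.partial /
# .invoke from prompts/base.py and the format_document helper). The import paths follow
# the module layout this change introduces; the template strings and variable values
# are illustrative assumptions rather than part of the change itself.
from langchain_core.documents import Document
from langchain_core.prompts import PromptTemplate, format_document

# partial() pins one input variable ahead of time; invoke() fills in the rest and
# returns a PromptValue that can be rendered with to_string() or to_messages().
template = PromptTemplate.from_template("Write a {length} summary about {topic}.")
pinned = template.partial(topic="prompt templates")
print(pinned.invoke({"length": "one-sentence"}).to_string())

# format_document() exposes page_content plus each metadata key to the document prompt.
doc = Document(page_content="This is a joke", metadata={"page": "1"})
doc_prompt = PromptTemplate.from_template("Page {page}: {page_content}")
print(format_document(doc, doc_prompt))  # -> "Page 1: This is a joke"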
Either this or examples should be provided.""" @@ -253,7 +252,7 @@ class FewShotChatMessagePromptTemplate( vectorstore=vectorstore ) - from langchain_core.schema import SystemMessage + from langchain_core.messages import SystemMessage from langchain_core.prompts import HumanMessagePromptTemplate from langchain_core.prompts.few_shot import FewShotChatMessagePromptTemplate diff --git a/libs/core/langchain_core/prompts/few_shot_with_templates.py b/libs/core/langchain_core/prompts/few_shot_with_templates.py index 682a392bb082b..18c9c7df637b9 100644 --- a/libs/core/langchain_core/prompts/few_shot_with_templates.py +++ b/libs/core/langchain_core/prompts/few_shot_with_templates.py @@ -2,9 +2,11 @@ from pathlib import Path from typing import Any, Dict, List, Optional, Union -from langchain_core.prompts.base import DEFAULT_FORMATTER_MAPPING, StringPromptTemplate -from langchain_core.prompts.example_selector.base import BaseExampleSelector from langchain_core.prompts.prompt import PromptTemplate +from langchain_core.prompts.string import ( + DEFAULT_FORMATTER_MAPPING, + StringPromptTemplate, +) from langchain_core.pydantic_v1 import Extra, root_validator @@ -15,7 +17,7 @@ class FewShotPromptWithTemplates(StringPromptTemplate): """Examples to format into the prompt. Either this or example_selector should be provided.""" - example_selector: Optional[BaseExampleSelector] = None + example_selector: Any = None """ExampleSelector to choose the examples to format into the prompt. Either this or examples should be provided.""" diff --git a/libs/core/langchain_core/prompts/loading.py b/libs/core/langchain_core/prompts/loading.py index 69238db0feaf3..e914cbb7b489a 100644 --- a/libs/core/langchain_core/prompts/loading.py +++ b/libs/core/langchain_core/prompts/loading.py @@ -6,13 +6,11 @@ import yaml +from langchain_core.output_parsers import StrOutputParser +from langchain_core.prompts.base import BasePromptTemplate from langchain_core.prompts.few_shot import FewShotPromptTemplate from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.schema import ( - BasePromptTemplate, - StrOutputParser, -) -from langchain_core.utils.loading import try_load_from_hub +from langchain_core.utils import try_load_from_hub URL_BASE = "https://raw.githubusercontent.com/hwchase17/langchain-hub/master/prompts/" logger = logging.getLogger(__name__) diff --git a/libs/core/langchain_core/prompts/pipeline.py b/libs/core/langchain_core/prompts/pipeline.py index dc39c592186c9..cffb11bbba8a8 100644 --- a/libs/core/langchain_core/prompts/pipeline.py +++ b/libs/core/langchain_core/prompts/pipeline.py @@ -1,8 +1,9 @@ from typing import Any, Dict, List, Tuple +from langchain_core.prompts.base import BasePromptTemplate from langchain_core.prompts.chat import BaseChatPromptTemplate +from langchain_core.prompts.value import PromptValue from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import BasePromptTemplate, PromptValue def _get_inputs(inputs: dict, input_variables: List[str]) -> dict: diff --git a/libs/core/langchain_core/prompts/prompt.py b/libs/core/langchain_core/prompts/prompt.py index 349bc2f33b2c4..c192f46c51fcf 100644 --- a/libs/core/langchain_core/prompts/prompt.py +++ b/libs/core/langchain_core/prompts/prompt.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Any, Dict, List, Literal, Optional, Union -from langchain_core.prompts.base import ( +from langchain_core.prompts.string import ( DEFAULT_FORMATTER_MAPPING, StringPromptTemplate, check_valid_template, diff --git 
a/libs/core/langchain_core/prompts/string.py b/libs/core/langchain_core/prompts/string.py new file mode 100644 index 0000000000000..2645ffdca51b0 --- /dev/null +++ b/libs/core/langchain_core/prompts/string.py @@ -0,0 +1,173 @@ +"""BasePrompt schema definition.""" +from __future__ import annotations + +import warnings +from abc import ABC +from string import Formatter +from typing import Any, Callable, Dict, List, Literal, Set + +from langchain_core.messages import BaseMessage, HumanMessage +from langchain_core.prompts.base import BasePromptTemplate +from langchain_core.prompts.value import PromptValue +from langchain_core.utils.formatting import formatter + + +def jinja2_formatter(template: str, **kwargs: Any) -> str: + """Format a template using jinja2. + + *Security warning*: As of LangChain 0.0.329, this method uses Jinja2's + SandboxedEnvironment by default. However, this sand-boxing should + be treated as a best-effort approach rather than a guarantee of security. + Do not accept jinja2 templates from untrusted sources as they may lead + to arbitrary Python code execution. + + https://jinja.palletsprojects.com/en/3.1.x/sandbox/ + """ + try: + from jinja2.sandbox import SandboxedEnvironment + except ImportError: + raise ImportError( + "jinja2 not installed, which is needed to use the jinja2_formatter. " + "Please install it with `pip install jinja2`." + "Please be cautious when using jinja2 templates. " + "Do not expand jinja2 templates using unverified or user-controlled " + "inputs as that can result in arbitrary Python code execution." + ) + + # This uses a sandboxed environment to prevent arbitrary code execution. + # Jinja2 uses an opt-out rather than opt-in approach for sand-boxing. + # Please treat this sand-boxing as a best-effort approach rather than + # a guarantee of security. + # We recommend to never use jinja2 templates with untrusted inputs. + # https://jinja.palletsprojects.com/en/3.1.x/sandbox/ + # approach not a guarantee of security. + return SandboxedEnvironment().from_string(template).render(**kwargs) + + +def validate_jinja2(template: str, input_variables: List[str]) -> None: + """ + Validate that the input variables are valid for the template. + Issues a warning if missing or extra variables are found. + + Args: + template: The template string. + input_variables: The input variables. + """ + input_variables_set = set(input_variables) + valid_variables = _get_jinja2_variables_from_template(template) + missing_variables = valid_variables - input_variables_set + extra_variables = input_variables_set - valid_variables + + warning_message = "" + if missing_variables: + warning_message += f"Missing variables: {missing_variables} " + + if extra_variables: + warning_message += f"Extra variables: {extra_variables}" + + if warning_message: + warnings.warn(warning_message.strip()) + + +def _get_jinja2_variables_from_template(template: str) -> Set[str]: + try: + from jinja2 import Environment, meta + except ImportError: + raise ImportError( + "jinja2 not installed, which is needed to use the jinja2_formatter. " + "Please install it with `pip install jinja2`." 
+ ) + env = Environment() + ast = env.parse(template) + variables = meta.find_undeclared_variables(ast) + return variables + + +DEFAULT_FORMATTER_MAPPING: Dict[str, Callable] = { + "f-string": formatter.format, + "jinja2": jinja2_formatter, +} + +DEFAULT_VALIDATOR_MAPPING: Dict[str, Callable] = { + "f-string": formatter.validate_input_variables, + "jinja2": validate_jinja2, +} + + +def check_valid_template( + template: str, template_format: str, input_variables: List[str] +) -> None: + """Check that template string is valid. + + Args: + template: The template string. + template_format: The template format. Should be one of "f-string" or "jinja2". + input_variables: The input variables. + + Raises: + ValueError: If the template format is not supported. + """ + if template_format not in DEFAULT_FORMATTER_MAPPING: + valid_formats = list(DEFAULT_FORMATTER_MAPPING) + raise ValueError( + f"Invalid template format. Got `{template_format}`;" + f" should be one of {valid_formats}" + ) + try: + validator_func = DEFAULT_VALIDATOR_MAPPING[template_format] + validator_func(template, input_variables) + except KeyError as e: + raise ValueError( + "Invalid prompt schema; check for mismatched or missing input parameters. " + + str(e) + ) + + +def get_template_variables(template: str, template_format: str) -> List[str]: + """Get the variables from the template. + + Args: + template: The template string. + template_format: The template format. Should be one of "f-string" or "jinja2". + + Returns: + The variables from the template. + + Raises: + ValueError: If the template format is not supported. + """ + if template_format == "jinja2": + # Get the variables for the template + input_variables = _get_jinja2_variables_from_template(template) + elif template_format == "f-string": + input_variables = { + v for _, v, _, _ in Formatter().parse(template) if v is not None + } + else: + raise ValueError(f"Unsupported template format: {template_format}") + + return sorted(input_variables) + + +class StringPromptValue(PromptValue): + """String prompt value.""" + + text: str + """Prompt text.""" + type: Literal["StringPromptValue"] = "StringPromptValue" + + def to_string(self) -> str: + """Return prompt as string.""" + return self.text + + def to_messages(self) -> List[BaseMessage]: + """Return prompt as messages.""" + return [HumanMessage(content=self.text)] + + +class StringPromptTemplate(BasePromptTemplate, ABC): + """String prompt that exposes the format method, returning a prompt.""" + + def format_prompt(self, **kwargs: Any) -> PromptValue: + """Create Chat Messages.""" + return StringPromptValue(text=self.format(**kwargs)) diff --git a/libs/core/langchain_core/schema/prompt.py b/libs/core/langchain_core/prompts/value.py similarity index 92% rename from libs/core/langchain_core/schema/prompt.py rename to libs/core/langchain_core/prompts/value.py index f20cfdf421674..bedbefe2e5daa 100644 --- a/libs/core/langchain_core/schema/prompt.py +++ b/libs/core/langchain_core/prompts/value.py @@ -4,7 +4,7 @@ from typing import List from langchain_core.load.serializable import Serializable -from langchain_core.schema.messages import BaseMessage +from langchain_core.messages import BaseMessage class PromptValue(Serializable, ABC): diff --git a/libs/core/langchain_core/schema/retriever.py b/libs/core/langchain_core/retrievers.py similarity index 99% rename from libs/core/langchain_core/schema/retriever.py rename to libs/core/langchain_core/retrievers.py index 1d8d177831747..7da99ae56e63a 100644 --- 
a/libs/core/langchain_core/schema/retriever.py +++ b/libs/core/langchain_core/retrievers.py @@ -7,9 +7,9 @@ from inspect import signature from typing import TYPE_CHECKING, Any, Dict, List, Optional +from langchain_core.documents import Document from langchain_core.load.dump import dumpd from langchain_core.runnables import RunnableConfig, RunnableSerializable -from langchain_core.schema.document import Document if TYPE_CHECKING: from langchain_core.callbacks.manager import ( diff --git a/libs/core/langchain_core/runnables/__init__.py b/libs/core/langchain_core/runnables/__init__.py index 3a1f555d1c889..b5940562a1fa8 100644 --- a/libs/core/langchain_core/runnables/__init__.py +++ b/libs/core/langchain_core/runnables/__init__.py @@ -25,7 +25,11 @@ RunnableSerializable, ) from langchain_core.runnables.branch import RunnableBranch -from langchain_core.runnables.config import RunnableConfig, patch_config +from langchain_core.runnables.config import ( + RunnableConfig, + get_config_list, + patch_config, +) from langchain_core.runnables.fallbacks import RunnableWithFallbacks from langchain_core.runnables.passthrough import RunnablePassthrough from langchain_core.runnables.router import RouterInput, RouterRunnable @@ -33,6 +37,7 @@ ConfigurableField, ConfigurableFieldMultiOption, ConfigurableFieldSingleOption, + add, ) __all__ = [ @@ -54,4 +59,6 @@ "RunnablePassthrough", "RunnableSequence", "RunnableWithFallbacks", + "get_config_list", + "add", ] diff --git a/libs/core/langchain_core/runnables/base.py b/libs/core/langchain_core/runnables/base.py index 384370f9dabba..2e4114078b922 100644 --- a/libs/core/langchain_core/runnables/base.py +++ b/libs/core/langchain_core/runnables/base.py @@ -36,11 +36,11 @@ AsyncCallbackManagerForChainRun, CallbackManagerForChainRun, ) - from langchain_core.callbacks.tracers.log_stream import RunLog, RunLogPatch - from langchain_core.callbacks.tracers.root_listeners import Listener from langchain_core.runnables.fallbacks import ( RunnableWithFallbacks as RunnableWithFallbacksT, ) + from langchain_core.tracers.log_stream import RunLog, RunLogPatch + from langchain_core.tracers.root_listeners import Listener from langchain_core.load.dump import dumpd from langchain_core.load.serializable import Serializable @@ -198,7 +198,7 @@ def buggy_double(y: int) -> int: ... code-block:: python - from langchain_core.callbacks.tracers import ConsoleCallbackHandler + from langchain_core.tracers import ConsoleCallbackHandler chain.invoke( ..., @@ -559,7 +559,7 @@ async def astream_log( """ from langchain_core.callbacks.base import BaseCallbackManager - from langchain_core.callbacks.tracers.log_stream import ( + from langchain_core.tracers.log_stream import ( LogStreamCallbackHandler, RunLog, RunLogPatch, @@ -725,7 +725,7 @@ def with_listeners( type, input, output, error, start_time, end_time, and any tags or metadata added to the run. """ - from langchain_core.callbacks.tracers.root_listeners import RootListenersTracer + from langchain_core.tracers.root_listeners import RootListenersTracer return RunnableBinding( bound=self, @@ -2945,7 +2945,7 @@ def with_listeners( type, input, output, error, start_time, end_time, and any tags or metadata added to the run. 
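# A minimal sketch of the with_listeners API whose tracer import is rerouted above
# (langchain_core.tracers.root_listeners); the lambda, the doubling function, and the
# printed field are illustrative assumptions, not part of this change.
from langchain_core.runnables import RunnableLambda

# Each listener receives the finished Run object, which carries the run's
# type, input, output, error, timestamps, tags, and metadata.
chain = RunnableLambda(lambda x: x * 2).with_listeners(
    on_end=lambda run: print(f"run finished: {run.name}"),
)
print(chain.invoke(3))  # -> 6 (after the listener has printed)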
""" - from langchain_core.callbacks.tracers.root_listeners import RootListenersTracer + from langchain_core.tracers.root_listeners import RootListenersTracer return self.__class__( bound=self.bound, diff --git a/libs/core/langchain_core/runnables/fallbacks.py b/libs/core/langchain_core/runnables/fallbacks.py index 1959b100c8e87..999aa1ae21190 100644 --- a/libs/core/langchain_core/runnables/fallbacks.py +++ b/libs/core/langchain_core/runnables/fallbacks.py @@ -66,7 +66,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): # response. from langchain_core.prompts import PromptTemplate - from langchain_core.schema.output_parser import StrOutputParser + from langchain_core.output_parser import StrOutputParser from langchain_core.runnables import RunnableLambda def when_all_is_lost(inputs): diff --git a/libs/core/langchain_core/runnables/history.py b/libs/core/langchain_core/runnables/history.py index d8eb5814514f1..1ae0d8a5c7d41 100644 --- a/libs/core/langchain_core/runnables/history.py +++ b/libs/core/langchain_core/runnables/history.py @@ -13,6 +13,7 @@ Union, ) +from langchain_core.chat_history import BaseChatMessageHistory from langchain_core.load import load from langchain_core.pydantic_v1 import BaseModel, create_model from langchain_core.runnables.base import Runnable, RunnableBindingBase, RunnableLambda @@ -21,12 +22,11 @@ ConfigurableFieldSpec, get_unique_config_specs, ) -from langchain_core.schema.chat_history import BaseChatMessageHistory if TYPE_CHECKING: - from langchain_core.callbacks.tracers.schemas import Run + from langchain_core.messages import BaseMessage from langchain_core.runnables.config import RunnableConfig - from langchain_core.schema.messages import BaseMessage + from langchain_core.tracers.schemas import Run MessagesOrDictWithMessages = Union[Sequence["BaseMessage"], Dict[str, Any]] GetSessionHistoryCallable = Callable[..., BaseChatMessageHistory] @@ -178,7 +178,7 @@ def get_input_schema( ) -> Type[BaseModel]: super_schema = super().get_input_schema(config) if super_schema.__custom_root_type__ is not None: - from langchain_core.schema.messages import BaseMessage + from langchain_core.messages import BaseMessage fields: Dict = {} if self.input_messages_key and self.history_messages_key: @@ -202,10 +202,10 @@ def get_input_schema( def _get_input_messages( self, input_val: Union[str, BaseMessage, Sequence[BaseMessage]] ) -> List[BaseMessage]: - from langchain_core.schema.messages import BaseMessage + from langchain_core.messages import BaseMessage if isinstance(input_val, str): - from langchain_core.schema.messages import HumanMessage + from langchain_core.messages import HumanMessage return [HumanMessage(content=input_val)] elif isinstance(input_val, BaseMessage): @@ -221,13 +221,13 @@ def _get_input_messages( def _get_output_messages( self, output_val: Union[str, BaseMessage, Sequence[BaseMessage], dict] ) -> List[BaseMessage]: - from langchain_core.schema.messages import BaseMessage + from langchain_core.messages import BaseMessage if isinstance(output_val, dict): output_val = output_val[self.output_messages_key or "output"] if isinstance(output_val, str): - from langchain_core.schema.messages import AIMessage + from langchain_core.messages import AIMessage return [AIMessage(content=output_val)] elif isinstance(output_val, BaseMessage): diff --git a/libs/core/langchain_core/schema/__init__.py b/libs/core/langchain_core/schema/__init__.py deleted file mode 100644 index 7e1742cc32796..0000000000000 --- a/libs/core/langchain_core/schema/__init__.py +++ 
/dev/null @@ -1,78 +0,0 @@ -"""**Schemas** are the LangChain Base Classes and Interfaces.""" -from langchain_core.schema.agent import AgentAction, AgentFinish -from langchain_core.schema.cache import BaseCache -from langchain_core.schema.chat_history import BaseChatMessageHistory -from langchain_core.schema.document import BaseDocumentTransformer, Document -from langchain_core.schema.exceptions import LangChainException -from langchain_core.schema.memory import BaseMemory -from langchain_core.schema.messages import ( - AIMessage, - BaseMessage, - ChatMessage, - FunctionMessage, - HumanMessage, - SystemMessage, - _message_from_dict, - _message_to_dict, - get_buffer_string, - messages_from_dict, - messages_to_dict, -) -from langchain_core.schema.output import ( - ChatGeneration, - ChatResult, - Generation, - LLMResult, - RunInfo, -) -from langchain_core.schema.output_parser import ( - BaseLLMOutputParser, - BaseOutputParser, - OutputParserException, - StrOutputParser, -) -from langchain_core.schema.prompt import PromptValue -from langchain_core.schema.prompt_template import BasePromptTemplate, format_document -from langchain_core.schema.retriever import BaseRetriever -from langchain_core.schema.storage import BaseStore - -RUN_KEY = "__run" -Memory = BaseMemory - -__all__ = [ - "BaseCache", - "BaseMemory", - "BaseStore", - "AgentFinish", - "AgentAction", - "Document", - "BaseChatMessageHistory", - "BaseDocumentTransformer", - "BaseMessage", - "ChatMessage", - "FunctionMessage", - "HumanMessage", - "AIMessage", - "SystemMessage", - "messages_from_dict", - "messages_to_dict", - "_message_to_dict", - "_message_from_dict", - "get_buffer_string", - "RunInfo", - "LLMResult", - "ChatResult", - "ChatGeneration", - "Generation", - "PromptValue", - "LangChainException", - "BaseRetriever", - "RUN_KEY", - "Memory", - "OutputParserException", - "StrOutputParser", - "BaseOutputParser", - "BaseLLMOutputParser", - "BasePromptTemplate", - "format_document", -] diff --git a/libs/core/langchain_core/schema/exceptions.py b/libs/core/langchain_core/schema/exceptions.py deleted file mode 100644 index 27ed0d07dc122..0000000000000 --- a/libs/core/langchain_core/schema/exceptions.py +++ /dev/null @@ -1,2 +0,0 @@ -class LangChainException(Exception): - """General LangChain exception.""" diff --git a/libs/core/langchain_core/schema/messages.py b/libs/core/langchain_core/schema/messages.py deleted file mode 100644 index 9f96ce68ad950..0000000000000 --- a/libs/core/langchain_core/schema/messages.py +++ /dev/null @@ -1,415 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, Dict, List, Sequence, Union - -from typing_extensions import Literal - -from langchain_core.load.serializable import Serializable -from langchain_core.pydantic_v1 import Extra, Field - -if TYPE_CHECKING: - from langchain_core.prompts.chat import ChatPromptTemplate - - -def get_buffer_string( - messages: Sequence[BaseMessage], human_prefix: str = "Human", ai_prefix: str = "AI" -) -> str: - """Convert sequence of Messages to strings and concatenate them into one string. - - Args: - messages: Messages to be converted to strings. - human_prefix: The prefix to prepend to contents of HumanMessages. - ai_prefix: THe prefix to prepend to contents of AIMessages. - - Returns: - A single string concatenation of all input messages. - - Example: - .. 
code-block:: python - - from langchain_core.schema import AIMessage, HumanMessage - - messages = [ - HumanMessage(content="Hi, how are you?"), - AIMessage(content="Good, how are you?"), - ] - get_buffer_string(messages) - # -> "Human: Hi, how are you?\nAI: Good, how are you?" - """ - string_messages = [] - for m in messages: - if isinstance(m, HumanMessage): - role = human_prefix - elif isinstance(m, AIMessage): - role = ai_prefix - elif isinstance(m, SystemMessage): - role = "System" - elif isinstance(m, FunctionMessage): - role = "Function" - elif isinstance(m, ChatMessage): - role = m.role - else: - raise ValueError(f"Got unsupported message type: {m}") - message = f"{role}: {m.content}" - if isinstance(m, AIMessage) and "function_call" in m.additional_kwargs: - message += f"{m.additional_kwargs['function_call']}" - string_messages.append(message) - - return "\n".join(string_messages) - - -class BaseMessage(Serializable): - """The base abstract Message class. - - Messages are the inputs and outputs of ChatModels. - """ - - content: Union[str, List[Union[str, Dict]]] - """The string contents of the message.""" - - additional_kwargs: dict = Field(default_factory=dict) - """Any additional information.""" - - type: str - - class Config: - extra = Extra.allow - - @classmethod - def is_lc_serializable(cls) -> bool: - """Return whether this class is serializable.""" - return True - - def __add__(self, other: Any) -> ChatPromptTemplate: - from langchain_core.prompts.chat import ChatPromptTemplate - - prompt = ChatPromptTemplate(messages=[self]) - return prompt + other - - -def merge_content( - first_content: Union[str, List[Union[str, Dict]]], - second_content: Union[str, List[Union[str, Dict]]], -) -> Union[str, List[Union[str, Dict]]]: - # If first chunk is a string - if isinstance(first_content, str): - # If the second chunk is also a string, then merge them naively - if isinstance(second_content, str): - return first_content + second_content - # If the second chunk is a list, add the first chunk to the start of the list - else: - return_list: List[Union[str, Dict]] = [first_content] - return return_list + second_content - # If both are lists, merge them naively - elif isinstance(second_content, List): - return first_content + second_content - # If the first content is a list, and the second content is a string - else: - # If the last element of the first content is a string - # Add the second content to the last element - if isinstance(first_content[-1], str): - return first_content[:-1] + [first_content[-1] + second_content] - else: - # Otherwise, add the second content as a new element of the list - return first_content + [second_content] - - -class BaseMessageChunk(BaseMessage): - """A Message chunk, which can be concatenated with other Message chunks.""" - - def _merge_kwargs_dict( - self, left: Dict[str, Any], right: Dict[str, Any] - ) -> Dict[str, Any]: - """Merge additional_kwargs from another BaseMessageChunk into this one.""" - merged = left.copy() - for k, v in right.items(): - if k not in merged: - merged[k] = v - elif type(merged[k]) != type(v): - raise ValueError( - f'additional_kwargs["{k}"] already exists in this message,' - " but with a different type." - ) - elif isinstance(merged[k], str): - merged[k] += v - elif isinstance(merged[k], dict): - merged[k] = self._merge_kwargs_dict(merged[k], v) - else: - raise ValueError( - f"Additional kwargs key {k} already exists in this message." 
- ) - return merged - - def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore - if isinstance(other, BaseMessageChunk): - # If both are (subclasses of) BaseMessageChunk, - # concat into a single BaseMessageChunk - - if isinstance(self, ChatMessageChunk): - return self.__class__( - role=self.role, - content=merge_content(self.content, other.content), - additional_kwargs=self._merge_kwargs_dict( - self.additional_kwargs, other.additional_kwargs - ), - ) - return self.__class__( - content=merge_content(self.content, other.content), - additional_kwargs=self._merge_kwargs_dict( - self.additional_kwargs, other.additional_kwargs - ), - ) - else: - raise TypeError( - 'unsupported operand type(s) for +: "' - f"{self.__class__.__name__}" - f'" and "{other.__class__.__name__}"' - ) - - -class HumanMessage(BaseMessage): - """A Message from a human.""" - - example: bool = False - """Whether this Message is being passed in to the model as part of an example - conversation. - """ - - type: Literal["human"] = "human" - - -HumanMessage.update_forward_refs() - - -class HumanMessageChunk(HumanMessage, BaseMessageChunk): - """A Human Message chunk.""" - - # Ignoring mypy re-assignment here since we're overriding the value - # to make sure that the chunk variant can be discriminated from the - # non-chunk variant. - type: Literal["HumanMessageChunk"] = "HumanMessageChunk" # type: ignore[assignment] # noqa: E501 - - -class AIMessage(BaseMessage): - """A Message from an AI.""" - - example: bool = False - """Whether this Message is being passed in to the model as part of an example - conversation. - """ - - type: Literal["ai"] = "ai" - - -AIMessage.update_forward_refs() - - -class AIMessageChunk(AIMessage, BaseMessageChunk): - """A Message chunk from an AI.""" - - # Ignoring mypy re-assignment here since we're overriding the value - # to make sure that the chunk variant can be discriminated from the - # non-chunk variant. - type: Literal["AIMessageChunk"] = "AIMessageChunk" # type: ignore[assignment] # noqa: E501 - - def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore - if isinstance(other, AIMessageChunk): - if self.example != other.example: - raise ValueError( - "Cannot concatenate AIMessageChunks with different example values." - ) - - return self.__class__( - example=self.example, - content=merge_content(self.content, other.content), - additional_kwargs=self._merge_kwargs_dict( - self.additional_kwargs, other.additional_kwargs - ), - ) - - return super().__add__(other) - - -class SystemMessage(BaseMessage): - """A Message for priming AI behavior, usually passed in as the first of a sequence - of input messages. - """ - - type: Literal["system"] = "system" - - -SystemMessage.update_forward_refs() - - -class SystemMessageChunk(SystemMessage, BaseMessageChunk): - """A System Message chunk.""" - - # Ignoring mypy re-assignment here since we're overriding the value - # to make sure that the chunk variant can be discriminated from the - # non-chunk variant. 
- type: Literal["SystemMessageChunk"] = "SystemMessageChunk" # type: ignore[assignment] # noqa: E501 - - -class FunctionMessage(BaseMessage): - """A Message for passing the result of executing a function back to a model.""" - - name: str - """The name of the function that was executed.""" - - type: Literal["function"] = "function" - - -FunctionMessage.update_forward_refs() - - -class FunctionMessageChunk(FunctionMessage, BaseMessageChunk): - """A Function Message chunk.""" - - # Ignoring mypy re-assignment here since we're overriding the value - # to make sure that the chunk variant can be discriminated from the - # non-chunk variant. - type: Literal["FunctionMessageChunk"] = "FunctionMessageChunk" # type: ignore[assignment] - - def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore - if isinstance(other, FunctionMessageChunk): - if self.name != other.name: - raise ValueError( - "Cannot concatenate FunctionMessageChunks with different names." - ) - - return self.__class__( - name=self.name, - content=merge_content(self.content, other.content), - additional_kwargs=self._merge_kwargs_dict( - self.additional_kwargs, other.additional_kwargs - ), - ) - - return super().__add__(other) - - -class ToolMessage(BaseMessage): - """A Message for passing the result of executing a tool back to a model.""" - - tool_call_id: str - """Tool call that this message is responding to.""" - - type: Literal["tool"] = "tool" - - -ToolMessage.update_forward_refs() - - -class ToolMessageChunk(ToolMessage, BaseMessageChunk): - """A Tool Message chunk.""" - - # Ignoring mypy re-assignment here since we're overriding the value - # to make sure that the chunk variant can be discriminated from the - # non-chunk variant. - type: Literal["ToolMessageChunk"] = "ToolMessageChunk" # type: ignore[assignment] - - def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore - if isinstance(other, ToolMessageChunk): - if self.tool_call_id != other.tool_call_id: - raise ValueError( - "Cannot concatenate ToolMessageChunks with different names." - ) - - return self.__class__( - tool_call_id=self.tool_call_id, - content=merge_content(self.content, other.content), - additional_kwargs=self._merge_kwargs_dict( - self.additional_kwargs, other.additional_kwargs - ), - ) - - return super().__add__(other) - - -class ChatMessage(BaseMessage): - """A Message that can be assigned an arbitrary speaker (i.e. role).""" - - role: str - """The speaker / role of the Message.""" - - type: Literal["chat"] = "chat" - - -ChatMessage.update_forward_refs() - - -class ChatMessageChunk(ChatMessage, BaseMessageChunk): - """A Chat Message chunk.""" - - # Ignoring mypy re-assignment here since we're overriding the value - # to make sure that the chunk variant can be discriminated from the - # non-chunk variant. - type: Literal["ChatMessageChunk"] = "ChatMessageChunk" # type: ignore - - def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore - if isinstance(other, ChatMessageChunk): - if self.role != other.role: - raise ValueError( - "Cannot concatenate ChatMessageChunks with different roles." 
- ) - - return self.__class__( - role=self.role, - content=merge_content(self.content, other.content), - additional_kwargs=self._merge_kwargs_dict( - self.additional_kwargs, other.additional_kwargs - ), - ) - - return super().__add__(other) - - -AnyMessage = Union[ - AIMessage, HumanMessage, ChatMessage, SystemMessage, FunctionMessage, ToolMessage -] - - -def _message_to_dict(message: BaseMessage) -> dict: - return {"type": message.type, "data": message.dict()} - - -def messages_to_dict(messages: Sequence[BaseMessage]) -> List[dict]: - """Convert a sequence of Messages to a list of dictionaries. - - Args: - messages: Sequence of messages (as BaseMessages) to convert. - - Returns: - List of messages as dicts. - """ - return [_message_to_dict(m) for m in messages] - - -def _message_from_dict(message: dict) -> BaseMessage: - _type = message["type"] - if _type == "human": - return HumanMessage(**message["data"]) - elif _type == "ai": - return AIMessage(**message["data"]) - elif _type == "system": - return SystemMessage(**message["data"]) - elif _type == "chat": - return ChatMessage(**message["data"]) - elif _type == "function": - return FunctionMessage(**message["data"]) - elif _type == "tool": - return ToolMessage(**message["data"]) - else: - raise ValueError(f"Got unexpected message type: {_type}") - - -def messages_from_dict(messages: List[dict]) -> List[BaseMessage]: - """Convert a sequence of messages from dicts to Message objects. - - Args: - messages: Sequence of messages (as dicts) to convert. - - Returns: - List of messages (BaseMessages). - """ - return [_message_from_dict(m) for m in messages] diff --git a/libs/core/langchain_core/schema/output.py b/libs/core/langchain_core/schema/output.py deleted file mode 100644 index a4ca64beb4d3f..0000000000000 --- a/libs/core/langchain_core/schema/output.py +++ /dev/null @@ -1,175 +0,0 @@ -from __future__ import annotations - -from copy import deepcopy -from typing import Any, Dict, List, Literal, Optional -from uuid import UUID - -from langchain_core.load.serializable import Serializable -from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema.messages import BaseMessage, BaseMessageChunk - - -class Generation(Serializable): - """A single text generation output.""" - - text: str - """Generated text output.""" - - generation_info: Optional[Dict[str, Any]] = None - """Raw response from the provider. May include things like the - reason for finishing or token log probabilities. 
- """ - type: Literal["Generation"] = "Generation" - """Type is used exclusively for serialization purposes.""" - # TODO: add log probs as separate attribute - - @classmethod - def is_lc_serializable(cls) -> bool: - """Return whether this class is serializable.""" - return True - - -class GenerationChunk(Generation): - """A Generation chunk, which can be concatenated with other Generation chunks.""" - - def __add__(self, other: GenerationChunk) -> GenerationChunk: - if isinstance(other, GenerationChunk): - generation_info = ( - {**(self.generation_info or {}), **(other.generation_info or {})} - if self.generation_info is not None or other.generation_info is not None - else None - ) - return GenerationChunk( - text=self.text + other.text, - generation_info=generation_info, - ) - else: - raise TypeError( - f"unsupported operand type(s) for +: '{type(self)}' and '{type(other)}'" - ) - - -class ChatGeneration(Generation): - """A single chat generation output.""" - - text: str = "" - """*SHOULD NOT BE SET DIRECTLY* The text contents of the output message.""" - message: BaseMessage - """The message output by the chat model.""" - # Override type to be ChatGeneration, ignore mypy error as this is intentional - type: Literal["ChatGeneration"] = "ChatGeneration" # type: ignore[assignment] - """Type is used exclusively for serialization purposes.""" - - @root_validator - def set_text(cls, values: Dict[str, Any]) -> Dict[str, Any]: - """Set the text attribute to be the contents of the message.""" - try: - values["text"] = values["message"].content - except (KeyError, AttributeError) as e: - raise ValueError("Error while initializing ChatGeneration") from e - return values - - -class ChatGenerationChunk(ChatGeneration): - """A ChatGeneration chunk, which can be concatenated with other - ChatGeneration chunks. - - Attributes: - message: The message chunk output by the chat model. - """ - - message: BaseMessageChunk - # Override type to be ChatGeneration, ignore mypy error as this is intentional - type: Literal["ChatGenerationChunk"] = "ChatGenerationChunk" # type: ignore[assignment] # noqa: E501 - """Type is used exclusively for serialization purposes.""" - - def __add__(self, other: ChatGenerationChunk) -> ChatGenerationChunk: - if isinstance(other, ChatGenerationChunk): - generation_info = ( - {**(self.generation_info or {}), **(other.generation_info or {})} - if self.generation_info is not None or other.generation_info is not None - else None - ) - return ChatGenerationChunk( - message=self.message + other.message, - generation_info=generation_info, - ) - else: - raise TypeError( - f"unsupported operand type(s) for +: '{type(self)}' and '{type(other)}'" - ) - - -class RunInfo(BaseModel): - """Class that contains metadata for a single execution of a Chain or model.""" - - run_id: UUID - """A unique identifier for the model or chain run.""" - - -class ChatResult(BaseModel): - """Class that contains all results for a single chat model call.""" - - generations: List[ChatGeneration] - """List of the chat generations. This is a List because an input can have multiple - candidate generations. - """ - llm_output: Optional[dict] = None - """For arbitrary LLM provider specific output.""" - - -class LLMResult(BaseModel): - """Class that contains all results for a batched LLM call.""" - - generations: List[List[Generation]] - """List of generated outputs. 
This is a List[List[]] because - each input could have multiple candidate generations.""" - llm_output: Optional[dict] = None - """Arbitrary LLM provider-specific output.""" - run: Optional[List[RunInfo]] = None - """List of metadata info for model call for each input.""" - - def flatten(self) -> List[LLMResult]: - """Flatten generations into a single list. - - Unpack List[List[Generation]] -> List[LLMResult] where each returned LLMResult - contains only a single Generation. If token usage information is available, - it is kept only for the LLMResult corresponding to the top-choice - Generation, to avoid over-counting of token usage downstream. - - Returns: - List of LLMResults where each returned LLMResult contains a single - Generation. - """ - llm_results = [] - for i, gen_list in enumerate(self.generations): - # Avoid double counting tokens in OpenAICallback - if i == 0: - llm_results.append( - LLMResult( - generations=[gen_list], - llm_output=self.llm_output, - ) - ) - else: - if self.llm_output is not None: - llm_output = deepcopy(self.llm_output) - llm_output["token_usage"] = dict() - else: - llm_output = None - llm_results.append( - LLMResult( - generations=[gen_list], - llm_output=llm_output, - ) - ) - return llm_results - - def __eq__(self, other: object) -> bool: - """Check for LLMResult equality by ignoring any metadata related to runs.""" - if not isinstance(other, LLMResult): - return NotImplemented - return ( - self.generations == other.generations - and self.llm_output == other.llm_output - ) diff --git a/libs/core/langchain_core/schema/prompt_template.py b/libs/core/langchain_core/schema/prompt_template.py deleted file mode 100644 index 2985c5bb06c7f..0000000000000 --- a/libs/core/langchain_core/schema/prompt_template.py +++ /dev/null @@ -1,228 +0,0 @@ -from __future__ import annotations - -import json -from abc import ABC, abstractmethod -from pathlib import Path -from typing import Any, Callable, Dict, List, Mapping, Optional, Type, Union - -import yaml - -from langchain_core.pydantic_v1 import BaseModel, Field, create_model, root_validator -from langchain_core.runnables import RunnableConfig, RunnableSerializable -from langchain_core.schema.document import Document -from langchain_core.schema.output_parser import BaseOutputParser -from langchain_core.schema.prompt import PromptValue - - -class BasePromptTemplate(RunnableSerializable[Dict, PromptValue], ABC): - """Base class for all prompt templates, returning a prompt.""" - - input_variables: List[str] - """A list of the names of the variables the prompt template expects.""" - input_types: Dict[str, Any] = Field(default_factory=dict) - """A dictionary of the types of the variables the prompt template expects. 
- If not provided, all variables are assumed to be strings.""" - output_parser: Optional[BaseOutputParser] = None - """How to parse the output of calling an LLM on this formatted prompt.""" - partial_variables: Mapping[str, Union[str, Callable[[], str]]] = Field( - default_factory=dict - ) - - @classmethod - def is_lc_serializable(cls) -> bool: - """Return whether this class is serializable.""" - return True - - class Config: - """Configuration for this pydantic object.""" - - arbitrary_types_allowed = True - - @property - def OutputType(self) -> Any: - from langchain_core.prompts.base import StringPromptValue - from langchain_core.prompts.chat import ChatPromptValueConcrete - - return Union[StringPromptValue, ChatPromptValueConcrete] - - def get_input_schema( - self, config: Optional[RunnableConfig] = None - ) -> Type[BaseModel]: - # This is correct, but pydantic typings/mypy don't think so. - return create_model( # type: ignore[call-overload] - "PromptInput", - **{k: (self.input_types.get(k, str), None) for k in self.input_variables}, - ) - - def invoke( - self, input: Dict, config: Optional[RunnableConfig] = None - ) -> PromptValue: - return self._call_with_config( - lambda inner_input: self.format_prompt( - **{key: inner_input[key] for key in self.input_variables} - ), - input, - config, - run_type="prompt", - ) - - @abstractmethod - def format_prompt(self, **kwargs: Any) -> PromptValue: - """Create Chat Messages.""" - - @root_validator() - def validate_variable_names(cls, values: Dict) -> Dict: - """Validate variable names do not include restricted names.""" - if "stop" in values["input_variables"]: - raise ValueError( - "Cannot have an input variable named 'stop', as it is used internally," - " please rename." - ) - if "stop" in values["partial_variables"]: - raise ValueError( - "Cannot have an partial variable named 'stop', as it is used " - "internally, please rename." - ) - - overall = set(values["input_variables"]).intersection( - values["partial_variables"] - ) - if overall: - raise ValueError( - f"Found overlapping input and partial variables: {overall}" - ) - return values - - def partial(self, **kwargs: Union[str, Callable[[], str]]) -> BasePromptTemplate: - """Return a partial of the prompt template.""" - prompt_dict = self.__dict__.copy() - prompt_dict["input_variables"] = list( - set(self.input_variables).difference(kwargs) - ) - prompt_dict["partial_variables"] = {**self.partial_variables, **kwargs} - return type(self)(**prompt_dict) - - def _merge_partial_and_user_variables(self, **kwargs: Any) -> Dict[str, Any]: - # Get partial params: - partial_kwargs = { - k: v if isinstance(v, str) else v() - for k, v in self.partial_variables.items() - } - return {**partial_kwargs, **kwargs} - - @abstractmethod - def format(self, **kwargs: Any) -> str: - """Format the prompt with the inputs. - - Args: - kwargs: Any arguments to be passed to the prompt template. - - Returns: - A formatted string. - - Example: - - .. code-block:: python - - prompt.format(variable1="foo") - """ - - @property - def _prompt_type(self) -> str: - """Return the prompt type key.""" - raise NotImplementedError - - def dict(self, **kwargs: Any) -> Dict: - """Return dictionary representation of prompt.""" - prompt_dict = super().dict(**kwargs) - try: - prompt_dict["_type"] = self._prompt_type - except NotImplementedError: - pass - return prompt_dict - - def save(self, file_path: Union[Path, str]) -> None: - """Save the prompt. - - Args: - file_path: Path to directory to save prompt to. - - Example: - .. 
code-block:: python - - prompt.save(file_path="path/prompt.yaml") - """ - if self.partial_variables: - raise ValueError("Cannot save prompt with partial variables.") - - # Fetch dictionary to save - prompt_dict = self.dict() - if "_type" not in prompt_dict: - raise NotImplementedError(f"Prompt {self} does not support saving.") - - # Convert file to Path object. - if isinstance(file_path, str): - save_path = Path(file_path) - else: - save_path = file_path - - directory_path = save_path.parent - directory_path.mkdir(parents=True, exist_ok=True) - - if save_path.suffix == ".json": - with open(file_path, "w") as f: - json.dump(prompt_dict, f, indent=4) - elif save_path.suffix == ".yaml": - with open(file_path, "w") as f: - yaml.dump(prompt_dict, f, default_flow_style=False) - else: - raise ValueError(f"{save_path} must be json or yaml") - - -def format_document(doc: Document, prompt: BasePromptTemplate) -> str: - """Format a document into a string based on a prompt template. - - First, this pulls information from the document from two sources: - - 1. `page_content`: - This takes the information from the `document.page_content` - and assigns it to a variable named `page_content`. - 2. metadata: - This takes information from `document.metadata` and assigns - it to variables of the same name. - - Those variables are then passed into the `prompt` to produce a formatted string. - - Args: - doc: Document, the page_content and metadata will be used to create - the final string. - prompt: BasePromptTemplate, will be used to format the page_content - and metadata into the final string. - - Returns: - string of the document formatted. - - Example: - .. code-block:: python - - from langchain_core.schema import Document - from langchain_core.prompts import PromptTemplate - - doc = Document(page_content="This is a joke", metadata={"page": "1"}) - prompt = PromptTemplate.from_template("Page {page}: {page_content}") - format_document(doc, prompt) - >>> "Page 1: This is a joke" - """ - base_info = {"page_content": doc.page_content, **doc.metadata} - missing_metadata = set(prompt.input_variables).difference(base_info) - if len(missing_metadata) > 0: - required_metadata = [ - iv for iv in prompt.input_variables if iv != "page_content" - ] - raise ValueError( - f"Document prompt requires documents to have metadata variables: " - f"{required_metadata}. Received document with missing metadata: " - f"{list(missing_metadata)}." 
- ) - document_info = {k: base_info[k] for k in prompt.input_variables} - return prompt.format(**document_info) diff --git a/libs/core/langchain_core/schema/storage.py b/libs/core/langchain_core/stores.py similarity index 100% rename from libs/core/langchain_core/schema/storage.py rename to libs/core/langchain_core/stores.py diff --git a/libs/core/langchain_core/tool.py b/libs/core/langchain_core/tools.py similarity index 100% rename from libs/core/langchain_core/tool.py rename to libs/core/langchain_core/tools.py diff --git a/libs/core/langchain_core/tracers/__init__.py b/libs/core/langchain_core/tracers/__init__.py new file mode 100644 index 0000000000000..303a3716b7966 --- /dev/null +++ b/libs/core/langchain_core/tracers/__init__.py @@ -0,0 +1,16 @@ +__all__ = [ + "BaseTracer", + "EvaluatorCallbackHandler", + "LangChainTracer", + "ConsoleCallbackHandler", + "Run", + "RunLog", + "RunLogPatch", +] + +from langchain_core.tracers.base import BaseTracer +from langchain_core.tracers.evaluation import EvaluatorCallbackHandler +from langchain_core.tracers.langchain import LangChainTracer +from langchain_core.tracers.log_stream import RunLog, RunLogPatch +from langchain_core.tracers.schemas import Run +from langchain_core.tracers.stdout import ConsoleCallbackHandler diff --git a/libs/core/langchain_core/callbacks/tracers/base.py b/libs/core/langchain_core/tracers/base.py similarity index 98% rename from libs/core/langchain_core/callbacks/tracers/base.py rename to libs/core/langchain_core/tracers/base.py index 1b3ba4092730c..a8479c1a7118a 100644 --- a/libs/core/langchain_core/callbacks/tracers/base.py +++ b/libs/core/langchain_core/tracers/base.py @@ -9,24 +9,21 @@ from tenacity import RetryCallState -from langchain_core.callbacks.base import BaseCallbackHandler -from langchain_core.callbacks.tracers.schemas import Run -from langchain_core.load.dump import dumpd -from langchain_core.schema.document import Document -from langchain_core.schema.output import ( +from langchain_core.callbacks import BaseCallbackHandler +from langchain_core.documents import Document +from langchain_core.exceptions import TracerException +from langchain_core.load import dumpd +from langchain_core.outputs import ( ChatGeneration, ChatGenerationChunk, GenerationChunk, LLMResult, ) +from langchain_core.tracers.schemas import Run logger = logging.getLogger(__name__) -class TracerException(Exception): - """Base class for exceptions in tracers module.""" - - class BaseTracer(BaseCallbackHandler, ABC): """Base interface for tracers.""" diff --git a/libs/core/langchain_core/callbacks/tracers/evaluation.py b/libs/core/langchain_core/tracers/evaluation.py similarity index 96% rename from libs/core/langchain_core/callbacks/tracers/evaluation.py rename to libs/core/langchain_core/tracers/evaluation.py index fa0f62e8879a3..685430a5afd92 100644 --- a/libs/core/langchain_core/callbacks/tracers/evaluation.py +++ b/libs/core/langchain_core/tracers/evaluation.py @@ -12,10 +12,10 @@ from langsmith.evaluation.evaluator import EvaluationResult, EvaluationResults from langchain_core.callbacks import manager -from langchain_core.callbacks.tracers import langchain as langchain_tracer -from langchain_core.callbacks.tracers.base import BaseTracer -from langchain_core.callbacks.tracers.langchain import _get_executor -from langchain_core.callbacks.tracers.schemas import Run +from langchain_core.tracers import langchain as langchain_tracer +from langchain_core.tracers.base import BaseTracer +from langchain_core.tracers.langchain import 
_get_executor +from langchain_core.tracers.schemas import Run logger = logging.getLogger(__name__) diff --git a/libs/core/langchain_core/callbacks/tracers/langchain.py b/libs/core/langchain_core/tracers/langchain.py similarity index 97% rename from libs/core/langchain_core/callbacks/tracers/langchain.py rename to libs/core/langchain_core/tracers/langchain.py index 7ab7f44a1f636..f1b91bb177dca 100644 --- a/libs/core/langchain_core/callbacks/tracers/langchain.py +++ b/libs/core/langchain_core/tracers/langchain.py @@ -17,11 +17,11 @@ wait_exponential_jitter, ) -from langchain_core.callbacks.tracers.base import BaseTracer -from langchain_core.callbacks.tracers.schemas import Run from langchain_core.env import get_runtime_environment -from langchain_core.load.dump import dumpd -from langchain_core.schema.messages import BaseMessage +from langchain_core.load import dumpd +from langchain_core.messages import BaseMessage +from langchain_core.tracers.base import BaseTracer +from langchain_core.tracers.schemas import Run logger = logging.getLogger(__name__) _LOGGED = set() diff --git a/libs/core/langchain_core/callbacks/tracers/langchain_v1.py b/libs/core/langchain_core/tracers/langchain_v1.py similarity index 97% rename from libs/core/langchain_core/callbacks/tracers/langchain_v1.py rename to libs/core/langchain_core/tracers/langchain_v1.py index 733a8dcf2502b..f2178b24a6101 100644 --- a/libs/core/langchain_core/callbacks/tracers/langchain_v1.py +++ b/libs/core/langchain_core/tracers/langchain_v1.py @@ -6,8 +6,9 @@ import requests -from langchain_core.callbacks.tracers.base import BaseTracer -from langchain_core.callbacks.tracers.schemas import ( +from langchain_core.messages import get_buffer_string +from langchain_core.tracers.base import BaseTracer +from langchain_core.tracers.schemas import ( ChainRun, LLMRun, Run, @@ -16,7 +17,6 @@ TracerSessionV1, TracerSessionV1Base, ) -from langchain_core.schema.messages import get_buffer_string from langchain_core.utils import raise_for_status_with_text logger = logging.getLogger(__name__) diff --git a/libs/core/langchain_core/callbacks/tracers/log_stream.py b/libs/core/langchain_core/tracers/log_stream.py similarity index 97% rename from libs/core/langchain_core/callbacks/tracers/log_stream.py rename to libs/core/langchain_core/tracers/log_stream.py index 53c28e4d06c56..97bcf452b3fc9 100644 --- a/libs/core/langchain_core/callbacks/tracers/log_stream.py +++ b/libs/core/langchain_core/tracers/log_stream.py @@ -18,10 +18,10 @@ import jsonpatch from anyio import create_memory_object_stream -from langchain_core.callbacks.tracers.base import BaseTracer -from langchain_core.callbacks.tracers.schemas import Run -from langchain_core.load.load import load -from langchain_core.schema.output import ChatGenerationChunk, GenerationChunk +from langchain_core.load import load +from langchain_core.outputs import ChatGenerationChunk, GenerationChunk +from langchain_core.tracers.base import BaseTracer +from langchain_core.tracers.schemas import Run class LogEntry(TypedDict): diff --git a/libs/core/langchain_core/callbacks/tracers/root_listeners.py b/libs/core/langchain_core/tracers/root_listeners.py similarity index 92% rename from libs/core/langchain_core/callbacks/tracers/root_listeners.py rename to libs/core/langchain_core/tracers/root_listeners.py index a693ae1f1a503..63dc5a2b7b7f8 100644 --- a/libs/core/langchain_core/callbacks/tracers/root_listeners.py +++ b/libs/core/langchain_core/tracers/root_listeners.py @@ -1,12 +1,12 @@ from typing import Callable, Optional, 
Union from uuid import UUID -from langchain_core.callbacks.tracers.base import BaseTracer -from langchain_core.callbacks.tracers.schemas import Run from langchain_core.runnables.config import ( RunnableConfig, call_func_with_variable_args, ) +from langchain_core.tracers.base import BaseTracer +from langchain_core.tracers.schemas import Run Listener = Union[Callable[[Run], None], Callable[[Run, RunnableConfig], None]] diff --git a/libs/core/langchain_core/callbacks/tracers/run_collector.py b/libs/core/langchain_core/tracers/run_collector.py similarity index 92% rename from libs/core/langchain_core/callbacks/tracers/run_collector.py rename to libs/core/langchain_core/tracers/run_collector.py index e03ab00aa1760..995ac7cc683dc 100644 --- a/libs/core/langchain_core/callbacks/tracers/run_collector.py +++ b/libs/core/langchain_core/tracers/run_collector.py @@ -3,8 +3,8 @@ from typing import Any, List, Optional, Union from uuid import UUID -from langchain_core.callbacks.tracers.base import BaseTracer -from langchain_core.callbacks.tracers.schemas import Run +from langchain_core.tracers.base import BaseTracer +from langchain_core.tracers.schemas import Run class RunCollectorCallbackHandler(BaseTracer): diff --git a/libs/core/langchain_core/callbacks/tracers/schemas.py b/libs/core/langchain_core/tracers/schemas.py similarity index 98% rename from libs/core/langchain_core/callbacks/tracers/schemas.py rename to libs/core/langchain_core/tracers/schemas.py index 93436b70a4174..7db106ec2c47d 100644 --- a/libs/core/langchain_core/callbacks/tracers/schemas.py +++ b/libs/core/langchain_core/tracers/schemas.py @@ -9,8 +9,8 @@ from langsmith.schemas import RunBase as BaseRunV2 from langsmith.schemas import RunTypeEnum as RunTypeEnumDep +from langchain_core.outputs import LLMResult from langchain_core.pydantic_v1 import BaseModel, Field, root_validator -from langchain_core.schema import LLMResult def RunTypeEnum() -> Type[RunTypeEnumDep]: diff --git a/libs/core/langchain_core/callbacks/tracers/stdout.py b/libs/core/langchain_core/tracers/stdout.py similarity index 98% rename from libs/core/langchain_core/callbacks/tracers/stdout.py rename to libs/core/langchain_core/tracers/stdout.py index 8a6b61e31335c..0054b3471b85f 100644 --- a/libs/core/langchain_core/callbacks/tracers/stdout.py +++ b/libs/core/langchain_core/tracers/stdout.py @@ -1,8 +1,8 @@ import json from typing import Any, Callable, List -from langchain_core.callbacks.tracers.base import BaseTracer -from langchain_core.callbacks.tracers.schemas import Run +from langchain_core.tracers.base import BaseTracer +from langchain_core.tracers.schemas import Run from langchain_core.utils.input import get_bolded_text, get_colored_text diff --git a/libs/core/langchain_core/utils/__init__.py b/libs/core/langchain_core/utils/__init__.py index df7a586b8c3e8..570e223d54816 100644 --- a/libs/core/langchain_core/utils/__init__.py +++ b/libs/core/langchain_core/utils/__init__.py @@ -11,6 +11,7 @@ get_colored_text, print_text, ) +from langchain_core.utils.loading import try_load_from_hub from langchain_core.utils.utils import ( check_package_version, convert_to_secret_str, @@ -35,4 +36,5 @@ "print_text", "raise_for_status_with_text", "xor_args", + "try_load_from_hub", ] diff --git a/libs/core/langchain_core/schema/vectorstore.py b/libs/core/langchain_core/vectorstores.py similarity index 99% rename from libs/core/langchain_core/schema/vectorstore.py rename to libs/core/langchain_core/vectorstores.py index 078a05d739c5e..a215d08c46fb7 100644 --- 
a/libs/core/langchain_core/schema/vectorstore.py +++ b/libs/core/langchain_core/vectorstores.py @@ -21,10 +21,10 @@ TypeVar, ) +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema import BaseRetriever -from langchain_core.schema.document import Document -from langchain_core.schema.embeddings import Embeddings +from langchain_core.retrievers import BaseRetriever if TYPE_CHECKING: from langchain_core.callbacks.manager import ( diff --git a/libs/core/pyproject.toml b/libs/core/pyproject.toml index b7b8a3184ebc0..b363f3f6debef 100644 --- a/libs/core/pyproject.toml +++ b/libs/core/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain-core" -version = "0.0.1" +version = "0.0.2" description = "Building applications with LLMs through composability" authors = [] license = "MIT" diff --git a/libs/core/tests/unit_tests/_api/test_imports.py b/libs/core/tests/unit_tests/_api/test_imports.py index 440f76d9574da..9b901aea0123e 100644 --- a/libs/core/tests/unit_tests/_api/test_imports.py +++ b/libs/core/tests/unit_tests/_api/test_imports.py @@ -6,6 +6,8 @@ "suppress_langchain_deprecation_warning", "surface_langchain_deprecation_warnings", "warn_deprecated", + "as_import_path", + "get_relative_path", ] diff --git a/libs/core/langchain_core/callbacks/tracers/__init__.py b/libs/core/tests/unit_tests/example_selectors/__init__.py similarity index 100% rename from libs/core/langchain_core/callbacks/tracers/__init__.py rename to libs/core/tests/unit_tests/example_selectors/__init__.py diff --git a/libs/core/tests/unit_tests/prompts/test_length_based_example_selector.py b/libs/core/tests/unit_tests/example_selectors/test_length_based_example_selector.py similarity index 94% rename from libs/core/tests/unit_tests/prompts/test_length_based_example_selector.py rename to libs/core/tests/unit_tests/example_selectors/test_length_based_example_selector.py index 59e35f8f6e5ba..8687ceae3d38a 100644 --- a/libs/core/tests/unit_tests/prompts/test_length_based_example_selector.py +++ b/libs/core/tests/unit_tests/example_selectors/test_length_based_example_selector.py @@ -1,10 +1,10 @@ """Test functionality related to length based selector.""" import pytest -from langchain_core.prompts.example_selector.length_based import ( +from langchain_core.example_selectors import ( LengthBasedExampleSelector, ) -from langchain_core.prompts.prompt import PromptTemplate +from langchain_core.prompts import PromptTemplate EXAMPLES = [ {"question": "Question: who are you?\nAnswer: foo"}, diff --git a/libs/core/tests/unit_tests/fake/callbacks.py b/libs/core/tests/unit_tests/fake/callbacks.py index aec0c2202abad..2a2af92269fe7 100644 --- a/libs/core/tests/unit_tests/fake/callbacks.py +++ b/libs/core/tests/unit_tests/fake/callbacks.py @@ -4,8 +4,8 @@ from uuid import UUID from langchain_core.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler +from langchain_core.messages import BaseMessage from langchain_core.pydantic_v1 import BaseModel -from langchain_core.schema.messages import BaseMessage class BaseFakeCallbackHandler(BaseModel): diff --git a/libs/core/tests/unit_tests/fake/chat_model.py b/libs/core/tests/unit_tests/fake/chat_model.py index 4a5a84064d0a4..e1268ad4fd3dd 100644 --- a/libs/core/tests/unit_tests/fake/chat_model.py +++ b/libs/core/tests/unit_tests/fake/chat_model.py @@ -7,10 +7,9 @@ AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun, ) -from langchain_core.chat_model 
import BaseChatModel, SimpleChatModel -from langchain_core.schema import ChatResult -from langchain_core.schema.messages import AIMessageChunk, BaseMessage -from langchain_core.schema.output import ChatGeneration, ChatGenerationChunk +from langchain_core.language_models.chat_models import BaseChatModel, SimpleChatModel +from langchain_core.messages import AIMessageChunk, BaseMessage +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult class FakeMessagesListChatModel(BaseChatModel): diff --git a/libs/core/tests/unit_tests/fake/llm.py b/libs/core/tests/unit_tests/fake/llm.py index fa1d92b104332..1ebff8d8ca1b9 100644 --- a/libs/core/tests/unit_tests/fake/llm.py +++ b/libs/core/tests/unit_tests/fake/llm.py @@ -6,9 +6,8 @@ AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun, ) -from langchain_core.llm import LLM +from langchain_core.language_models import LLM, LanguageModelInput from langchain_core.runnables import RunnableConfig -from langchain_core.schema.language_model import LanguageModelInput class FakeListLLM(LLM): diff --git a/libs/core/tests/unit_tests/fake/memory.py b/libs/core/tests/unit_tests/fake/memory.py index 3dc5142e461c1..43dd53dc5eba4 100644 --- a/libs/core/tests/unit_tests/fake/memory.py +++ b/libs/core/tests/unit_tests/fake/memory.py @@ -1,10 +1,10 @@ from typing import List -from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema import ( +from langchain_core.chat_history import ( BaseChatMessageHistory, ) -from langchain_core.schema.messages import BaseMessage +from langchain_core.messages import BaseMessage +from langchain_core.pydantic_v1 import BaseModel, Field class ChatMessageHistory(BaseChatMessageHistory, BaseModel): diff --git a/libs/core/tests/unit_tests/prompts/test_chat.py b/libs/core/tests/unit_tests/prompts/test_chat.py index 6272d0b2c9150..1e919e28e6f47 100644 --- a/libs/core/tests/unit_tests/prompts/test_chat.py +++ b/libs/core/tests/unit_tests/prompts/test_chat.py @@ -3,6 +3,13 @@ import pytest +from langchain_core.messages import ( + AIMessage, + BaseMessage, + HumanMessage, + SystemMessage, + get_buffer_string, +) from langchain_core.prompts import PromptTemplate from langchain_core.prompts.chat import ( AIMessagePromptTemplate, @@ -15,13 +22,6 @@ SystemMessagePromptTemplate, _convert_to_message, ) -from langchain_core.schema.messages import ( - AIMessage, - BaseMessage, - HumanMessage, - SystemMessage, - get_buffer_string, -) def create_messages() -> List[BaseMessagePromptTemplate]: diff --git a/libs/core/tests/unit_tests/prompts/test_few_shot.py b/libs/core/tests/unit_tests/prompts/test_few_shot.py index 5955ba8cb009c..7304e275830a8 100644 --- a/libs/core/tests/unit_tests/prompts/test_few_shot.py +++ b/libs/core/tests/unit_tests/prompts/test_few_shot.py @@ -3,19 +3,19 @@ import pytest +from langchain_core.example_selectors import BaseExampleSelector +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage from langchain_core.prompts import ( AIMessagePromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, ) from langchain_core.prompts.chat import SystemMessagePromptTemplate -from langchain_core.prompts.example_selector.base import BaseExampleSelector from langchain_core.prompts.few_shot import ( FewShotChatMessagePromptTemplate, FewShotPromptTemplate, ) from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.schema import AIMessage, HumanMessage, SystemMessage EXAMPLE_PROMPT = PromptTemplate( input_variables=["question", "answer"], 
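
Note: the output classes removed from ``langchain_core/schema/output.py`` above (``Generation``, ``GenerationChunk``, ``ChatGeneration``, ``LLMResult``, ...) are not dropped; they are re-homed in the new top-level ``langchain_core.outputs`` module that the rewritten imports elsewhere in this changeset reference. The following is a minimal illustrative sketch, not part of the diff, exercising the two behaviours documented in that file and assuming the new import path:

    .. code-block:: python

        from langchain_core.outputs import Generation, GenerationChunk, LLMResult

        # GenerationChunk.__add__ concatenates text and merges generation_info dicts.
        chunk = GenerationChunk(text="Hello, ", generation_info={"model": "fake"})
        chunk += GenerationChunk(text="world", generation_info={"finish_reason": "stop"})
        assert chunk.text == "Hello, world"
        assert chunk.generation_info == {"model": "fake", "finish_reason": "stop"}

        # LLMResult.flatten() unpacks List[List[Generation]] into one LLMResult per
        # input, keeping token_usage only on the first result to avoid double counting.
        result = LLMResult(
            generations=[[Generation(text="foo")], [Generation(text="bar")]],
            llm_output={"token_usage": {"total_tokens": 7}},
        )
        first, second = result.flatten()
        assert first.llm_output == {"token_usage": {"total_tokens": 7}}
        assert second.llm_output == {"token_usage": {}}
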
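Likewise, ``BasePromptTemplate`` and ``format_document``, deleted from ``langchain_core/schema/prompt_template.py`` above, remain importable from ``langchain_core.prompts`` per the updated ``test_imports.py`` expectations below. A small sketch, outside this changeset, of the ``partial()`` behaviour defined on that class; ``PromptTemplate.from_template`` and the output shown are assumptions based on the standard prompt API, not something this diff introduces:

    .. code-block:: python

        from langchain_core.prompts import PromptTemplate

        prompt = PromptTemplate.from_template("Tell me a {adjective} joke about {content}.")
        # partial() moves the bound name out of input_variables and into
        # partial_variables, so only the remaining variables are required at format time.
        partial_prompt = prompt.partial(adjective="funny")
        assert partial_prompt.input_variables == ["content"]
        print(partial_prompt.format(content="chickens"))
        # -> "Tell me a funny joke about chickens."
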
template="{question}: {answer}" diff --git a/libs/core/tests/unit_tests/prompts/test_imports.py b/libs/core/tests/unit_tests/prompts/test_imports.py index b70a3e6fc2a8d..4f1b5dcacc03b 100644 --- a/libs/core/tests/unit_tests/prompts/test_imports.py +++ b/libs/core/tests/unit_tests/prompts/test_imports.py @@ -6,20 +6,22 @@ "BasePromptTemplate", "ChatMessagePromptTemplate", "ChatPromptTemplate", + "ChatPromptValueConcrete", "FewShotPromptTemplate", "FewShotPromptWithTemplates", + "FewShotChatMessagePromptTemplate", + "format_document", + "ChatPromptValue", + "PromptValue", + "StringPromptValue", "HumanMessagePromptTemplate", - "LengthBasedExampleSelector", - "MaxMarginalRelevanceExampleSelector", "MessagesPlaceholder", "PipelinePromptTemplate", "Prompt", "PromptTemplate", - "SemanticSimilarityExampleSelector", "StringPromptTemplate", "SystemMessagePromptTemplate", "load_prompt", - "FewShotChatMessagePromptTemplate", ] diff --git a/libs/core/tests/unit_tests/prompts/test_utils.py b/libs/core/tests/unit_tests/prompts/test_utils.py index ac3cf84e43e9b..1f7ea043e9ec9 100644 --- a/libs/core/tests/unit_tests/prompts/test_utils.py +++ b/libs/core/tests/unit_tests/prompts/test_utils.py @@ -1,5 +1,5 @@ """Test functionality related to prompt utils.""" -from langchain_core.prompts.example_selector.semantic_similarity import sorted_values +from langchain_core.example_selectors import sorted_values def test_sorted_vals() -> None: diff --git a/libs/core/tests/unit_tests/runnable/__init__.py b/libs/core/tests/unit_tests/runnables/__init__.py similarity index 100% rename from libs/core/tests/unit_tests/runnable/__init__.py rename to libs/core/tests/unit_tests/runnables/__init__.py diff --git a/libs/core/tests/unit_tests/runnable/__snapshots__/test_runnable.ambr b/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr similarity index 89% rename from libs/core/tests/unit_tests/runnable/__snapshots__/test_runnable.ambr rename to libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr index d91c0f5345a7d..127e7122c0a4c 100644 --- a/libs/core/tests/unit_tests/runnable/__snapshots__/test_runnable.ambr +++ b/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr @@ -466,7 +466,7 @@ # --- # name: test_combining_sequences.3 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': 
['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, {'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': "RunnableLambda(lambda x: {'question': x[0] + x[1]})"}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nicer assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['baz, qux'])"}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': ['baz', 'qux']}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': 
['langchain_core', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo, bar'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo, bar', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': 'foo, bar'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': AIMessage(content='foo, bar')}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000004'), name='', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': "RunnableLambda(lambda x: {'question': x[0] + x[1]})"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': ['foo', 'bar']}, outputs={'question': 'foobar'}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:4'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000005'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 
'You are a nicer assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'foobar'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nicer assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'foobar', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:5'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000006'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['baz, qux'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['baz, qux'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nicer assistant.\nHuman: foobar']}, outputs={'generations': [[{'text': 'baz, qux', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': 'baz, qux'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:6'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000007'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': AIMessage(content='baz, qux')}, outputs={'output': ['baz', 'qux']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:7'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': 
['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, {'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': "RunnableLambda(lambda x: {'question': x[0] + x[1]})"}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nicer assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['baz, qux'])"}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': ['baz', 'qux']}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 
'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo, bar'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo, bar', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'ai', 'AIMessage'], 'kwargs': {'content': 'foo, bar'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': AIMessage(content='foo, bar')}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000004'), name='', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': "RunnableLambda(lambda x: {'question': x[0] + x[1]})"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': 
FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': ['foo', 'bar']}, outputs={'question': 'foobar'}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:4'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000005'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nicer assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'foobar'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nicer assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'foobar', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:5'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000006'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['baz, qux'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['baz, qux'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nicer assistant.\nHuman: foobar']}, outputs={'generations': [[{'text': 'baz, qux', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'ai', 'AIMessage'], 'kwargs': {'content': 'baz, qux'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:6'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000007'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 
'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': AIMessage(content='baz, qux')}, outputs={'output': ['baz', 'qux']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:7'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_each @@ -574,7 +574,7 @@ "id": [ "tests", "unit_tests", - "runnable", + "runnables", "test_runnable", "FakeSplitIntoListParser" ], @@ -1053,7 +1053,7 @@ # --- # name: test_prompt_with_chat_model.2 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': AIMessage(content='foo')}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 
events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': 'foo'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': AIMessage(content='foo')}, 
reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'ai', 'AIMessage'], 'kwargs': {'content': 'foo'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_prompt_with_chat_model_and_parser @@ -1173,7 +1173,7 @@ # --- # name: test_prompt_with_chat_model_and_parser.1 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 
'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), 
Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo, bar'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo, bar', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': 'foo, bar'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': AIMessage(content='foo, bar')}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 
0)}], inputs={'question': 'What is your name?'}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo, bar'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo, bar', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'ai', 'AIMessage'], 'kwargs': {'content': 'foo, bar'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': 
['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': AIMessage(content='foo, bar')}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_prompt_with_chat_model_async @@ -1286,7 +1286,7 @@ # --- # name: test_prompt_with_chat_model_async.2 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': AIMessage(content='foo')}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], 
inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': 'foo'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': AIMessage(content='foo')}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, 
child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'ai', 'AIMessage'], 'kwargs': {'content': 'foo'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_prompt_with_llm @@ -1393,13 +1393,13 @@ # --- # name: test_prompt_with_llm.1 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 
'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 
'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], 
inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_prompt_with_llm.2 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': 'bar'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', 
end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'bar', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), - Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 
'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your favorite color?'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000004'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your favorite color?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your favorite color?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000005'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your favorite color?']}, 
outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': 'bar'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, 
{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'bar', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your favorite color?'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000004'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 
'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your favorite color?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your favorite color?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000005'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your favorite color?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_prompt_with_llm_and_async_lambda @@ -1519,7 +1519,7 @@ # --- # name: test_prompt_with_llm_and_async_lambda.1 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 
'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}], 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': 'RunnableLambda(afunc=...)'}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, 
reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='passthrough', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': 'RunnableLambda(afunc=...)'}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': 'foo'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}], 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': 'RunnableLambda(afunc=...)'}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 
'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='passthrough', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': 'RunnableLambda(afunc=...)'}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': 'foo'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_router_runnable @@ -4377,7 +4377,7 @@ "id": [ "tests", "unit_tests", - "runnable", + "runnables", "test_runnable", "FakeRetriever" ], diff --git a/libs/core/tests/unit_tests/runnable/test_config.py b/libs/core/tests/unit_tests/runnables/test_config.py similarity index 94% rename from libs/core/tests/unit_tests/runnable/test_config.py rename to libs/core/tests/unit_tests/runnables/test_config.py index e15c5e48a4a2b..4710b2d525651 100644 --- a/libs/core/tests/unit_tests/runnable/test_config.py +++ b/libs/core/tests/unit_tests/runnables/test_config.py @@ -1,8 +1,8 @@ from langchain_core.callbacks.manager import CallbackManager from langchain_core.callbacks.stdout import StdOutCallbackHandler from langchain_core.callbacks.streaming_stdout import StreamingStdOutCallbackHandler -from 
langchain_core.callbacks.tracers.stdout import ConsoleCallbackHandler from langchain_core.runnables.config import RunnableConfig, merge_configs +from langchain_core.tracers.stdout import ConsoleCallbackHandler def test_merge_config_callbacks() -> None: diff --git a/libs/core/tests/unit_tests/runnable/test_history.py b/libs/core/tests/unit_tests/runnables/test_history.py similarity index 97% rename from libs/core/tests/unit_tests/runnable/test_history.py rename to libs/core/tests/unit_tests/runnables/test_history.py index 534ff12b44c81..9c51cd5875899 100644 --- a/libs/core/tests/unit_tests/runnable/test_history.py +++ b/libs/core/tests/unit_tests/runnables/test_history.py @@ -1,9 +1,10 @@ from typing import Any, Callable, Sequence, Union +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage from langchain_core.pydantic_v1 import BaseModel -from langchain_core.runnables import RunnableConfig, RunnableLambda +from langchain_core.runnables.base import RunnableLambda +from langchain_core.runnables.config import RunnableConfig from langchain_core.runnables.history import RunnableWithMessageHistory -from langchain_core.schema import AIMessage, BaseMessage, HumanMessage from tests.unit_tests.fake.memory import ChatMessageHistory diff --git a/libs/core/tests/unit_tests/runnable/test_runnable.py b/libs/core/tests/unit_tests/runnables/test_runnable.py similarity index 98% rename from libs/core/tests/unit_tests/runnable/test_runnable.py rename to libs/core/tests/unit_tests/runnables/test_runnable.py index 21c5a28b689b8..f1f591db49702 100644 --- a/libs/core/tests/unit_tests/runnable/test_runnable.py +++ b/libs/core/tests/unit_tests/runnables/test_runnable.py @@ -26,53 +26,55 @@ collect_runs, trace_as_chain_group, ) -from langchain_core.callbacks.tracers.base import BaseTracer -from langchain_core.callbacks.tracers.log_stream import RunLog, RunLogPatch -from langchain_core.callbacks.tracers.schemas import Run -from langchain_core.callbacks.tracers.stdout import ConsoleCallbackHandler -from langchain_core.load.dump import dumpd, dumps -from langchain_core.output_parsers.list import CommaSeparatedListOutputParser -from langchain_core.prompts import PromptTemplate -from langchain_core.prompts.base import StringPromptValue -from langchain_core.prompts.chat import ( +from langchain_core.documents import Document +from langchain_core.load import dumpd, dumps +from langchain_core.messages import ( + AIMessage, + AIMessageChunk, + HumanMessage, + SystemMessage, +) +from langchain_core.output_parsers import ( + BaseOutputParser, + CommaSeparatedListOutputParser, + StrOutputParser, +) +from langchain_core.prompts import ( ChatPromptTemplate, ChatPromptValue, HumanMessagePromptTemplate, MessagesPlaceholder, + PromptTemplate, + StringPromptValue, SystemMessagePromptTemplate, ) from langchain_core.pydantic_v1 import BaseModel +from langchain_core.retrievers import BaseRetriever from langchain_core.runnables import ( + ConfigurableField, + ConfigurableFieldMultiOption, + ConfigurableFieldSingleOption, RouterRunnable, Runnable, + RunnableBinding, RunnableBranch, RunnableConfig, + RunnableGenerator, RunnableLambda, RunnableParallel, RunnablePassthrough, RunnableSequence, RunnableWithFallbacks, -) -from langchain_core.runnables.base import ( - ConfigurableField, - RunnableBinding, - RunnableGenerator, -) -from langchain_core.runnables.utils import ( - ConfigurableFieldMultiOption, - ConfigurableFieldSingleOption, add, ) -from langchain_core.schema.document import Document -from 
langchain_core.schema.messages import ( - AIMessage, - AIMessageChunk, - HumanMessage, - SystemMessage, +from langchain_core.tools import BaseTool, tool +from langchain_core.tracers import ( + BaseTracer, + ConsoleCallbackHandler, + Run, + RunLog, + RunLogPatch, ) -from langchain_core.schema.output_parser import BaseOutputParser, StrOutputParser -from langchain_core.schema.retriever import BaseRetriever -from langchain_core.tool import BaseTool, tool from tests.unit_tests.fake.chat_model import FakeListChatModel from tests.unit_tests.fake.llm import FakeListLLM, FakeStreamingListLLM @@ -1539,7 +1541,7 @@ def test_with_listeners(mocker: MockerFixture) -> None: ) chat = FakeListChatModel(responses=["foo"]) - chain = prompt | chat + chain: Runnable = prompt | chat mock_start = mocker.Mock() mock_end = mocker.Mock() @@ -1572,7 +1574,7 @@ async def test_with_listeners_async(mocker: MockerFixture) -> None: ) chat = FakeListChatModel(responses=["foo"]) - chain = prompt | chat + chain: Runnable = prompt | chat mock_start = mocker.Mock() mock_end = mocker.Mock() @@ -1608,7 +1610,7 @@ def test_prompt_with_chat_model( ) chat = FakeListChatModel(responses=["foo"]) - chain = prompt | chat + chain: Runnable = prompt | chat assert repr(chain) == snapshot assert isinstance(chain, RunnableSequence) @@ -1712,7 +1714,7 @@ async def test_prompt_with_chat_model_async( ) chat = FakeListChatModel(responses=["foo"]) - chain = prompt | chat + chain: Runnable = prompt | chat assert repr(chain) == snapshot assert isinstance(chain, RunnableSequence) @@ -1819,7 +1821,7 @@ async def test_prompt_with_llm( ) llm = FakeListLLM(responses=["foo", "bar"]) - chain = prompt | llm + chain: Runnable = prompt | llm assert isinstance(chain, RunnableSequence) assert chain.first == prompt @@ -2325,13 +2327,13 @@ def test_seq_prompt_dict(mocker: MockerFixture, snapshot: SnapshotAssertion) -> async def test_router_runnable( mocker: MockerFixture, snapshot: SnapshotAssertion ) -> None: - chain1 = ChatPromptTemplate.from_template( + chain1: Runnable = ChatPromptTemplate.from_template( "You are a math genius. Answer the question: {question}" ) | FakeListLLM(responses=["4"]) - chain2 = ChatPromptTemplate.from_template( + chain2: Runnable = ChatPromptTemplate.from_template( "You are an english major. Answer the question: {question}" ) | FakeListLLM(responses=["2"]) - router = RouterRunnable({"math": chain1, "english": chain2}) + router: Runnable = RouterRunnable({"math": chain1, "english": chain2}) chain: Runnable = { "key": lambda x: x["key"], "input": {"question": lambda x: x["question"]}, @@ -2377,10 +2379,10 @@ async def test_router_runnable( async def test_higher_order_lambda_runnable( mocker: MockerFixture, snapshot: SnapshotAssertion ) -> None: - math_chain = ChatPromptTemplate.from_template( + math_chain: Runnable = ChatPromptTemplate.from_template( "You are a math genius. Answer the question: {question}" ) | FakeListLLM(responses=["4"]) - english_chain = ChatPromptTemplate.from_template( + english_chain: Runnable = ChatPromptTemplate.from_template( "You are an english major. 
Answer the question: {question}" ) | FakeListLLM(responses=["2"]) input_map: Runnable = RunnableParallel( @@ -3096,7 +3098,7 @@ async def test_deep_astream_assign() -> None: def test_runnable_sequence_transform() -> None: llm = FakeStreamingListLLM(responses=["foo-lish"]) - chain = llm | StrOutputParser() + chain: Runnable = llm | StrOutputParser() stream = chain.transform(llm.stream("Hi there!")) @@ -3111,7 +3113,7 @@ def test_runnable_sequence_transform() -> None: async def test_runnable_sequence_atransform() -> None: llm = FakeStreamingListLLM(responses=["foo-lish"]) - chain = llm | StrOutputParser() + chain: Runnable = llm | StrOutputParser() stream = chain.atransform(llm.astream("Hi there!")) diff --git a/libs/core/tests/unit_tests/runnable/test_utils.py b/libs/core/tests/unit_tests/runnables/test_utils.py similarity index 100% rename from libs/core/tests/unit_tests/runnable/test_utils.py rename to libs/core/tests/unit_tests/runnables/test_utils.py diff --git a/libs/core/tests/unit_tests/schema/__init__.py b/libs/core/tests/unit_tests/schema/__init__.py deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/libs/core/tests/unit_tests/schema/test_imports.py b/libs/core/tests/unit_tests/schema/test_imports.py deleted file mode 100644 index 5bc2f228798ae..0000000000000 --- a/libs/core/tests/unit_tests/schema/test_imports.py +++ /dev/null @@ -1,43 +0,0 @@ -from langchain_core.schema import __all__ - -EXPECTED_ALL = [ - "BaseCache", - "BaseMemory", - "BaseStore", - "AgentFinish", - "AgentAction", - "Document", - "BaseChatMessageHistory", - "BaseDocumentTransformer", - "BaseMessage", - "ChatMessage", - "FunctionMessage", - "HumanMessage", - "AIMessage", - "SystemMessage", - "messages_from_dict", - "messages_to_dict", - "_message_to_dict", - "_message_from_dict", - "get_buffer_string", - "RunInfo", - "LLMResult", - "ChatResult", - "ChatGeneration", - "Generation", - "PromptValue", - "LangChainException", - "BaseRetriever", - "RUN_KEY", - "Memory", - "OutputParserException", - "StrOutputParser", - "BaseOutputParser", - "BaseLLMOutputParser", - "BasePromptTemplate", - "format_document", -] - - -def test_all_imports() -> None: - assert set(__all__) == set(EXPECTED_ALL) diff --git a/libs/core/tests/unit_tests/schema/test_messages.py b/libs/core/tests/unit_tests/test_messages.py similarity index 98% rename from libs/core/tests/unit_tests/schema/test_messages.py rename to libs/core/tests/unit_tests/test_messages.py index 8c263e6ed91ce..9e8918dbd8953 100644 --- a/libs/core/tests/unit_tests/schema/test_messages.py +++ b/libs/core/tests/unit_tests/test_messages.py @@ -1,6 +1,6 @@ import pytest -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessageChunk, ChatMessageChunk, FunctionMessageChunk, diff --git a/libs/core/tests/unit_tests/schema/test_output.py b/libs/core/tests/unit_tests/test_outputs.py similarity index 94% rename from libs/core/tests/unit_tests/schema/test_output.py rename to libs/core/tests/unit_tests/test_outputs.py index 5e086c5e5a363..a2f6e3e80e2b8 100644 --- a/libs/core/tests/unit_tests/schema/test_output.py +++ b/libs/core/tests/unit_tests/test_outputs.py @@ -1,5 +1,5 @@ -from langchain_core.schema.messages import HumanMessageChunk -from langchain_core.schema.output import ChatGenerationChunk, GenerationChunk +from langchain_core.messages import HumanMessageChunk +from langchain_core.outputs import ChatGenerationChunk, GenerationChunk def test_generation_chunk() -> None: diff --git a/libs/core/tests/unit_tests/test_tool.py 
b/libs/core/tests/unit_tests/test_tools.py similarity index 99% rename from libs/core/tests/unit_tests/test_tool.py rename to libs/core/tests/unit_tests/test_tools.py index 1f63798925e42..dda3be342f3a5 100644 --- a/libs/core/tests/unit_tests/test_tool.py +++ b/libs/core/tests/unit_tests/test_tools.py @@ -7,12 +7,12 @@ import pytest -from langchain_core.callbacks.manager import ( +from langchain_core.callbacks import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain_core.pydantic_v1 import BaseModel -from langchain_core.tool import ( +from langchain_core.tools import ( BaseTool, SchemaAnnotationError, StructuredTool, diff --git a/libs/core/tests/unit_tests/utils/test_imports.py b/libs/core/tests/unit_tests/utils/test_imports.py index 91e5c016e7d67..c553a984e567f 100644 --- a/libs/core/tests/unit_tests/utils/test_imports.py +++ b/libs/core/tests/unit_tests/utils/test_imports.py @@ -14,6 +14,7 @@ "print_text", "raise_for_status_with_text", "xor_args", + "try_load_from_hub", ] diff --git a/libs/langchain/langchain/__init__.py b/libs/langchain/langchain/__init__.py index 8f47b520aa540..5dbd5dc229b1a 100644 --- a/libs/langchain/langchain/__init__.py +++ b/libs/langchain/langchain/__init__.py @@ -251,7 +251,7 @@ def __getattr__(name: str) -> Any: return PromptTemplate elif name == "BasePromptTemplate": - from langchain_core.schema.prompt_template import BasePromptTemplate + from langchain_core.prompts import BasePromptTemplate _warn_on_import( name, replacement="langchain.schema.prompt_template.BasePromptTemplate" diff --git a/libs/langchain/langchain/adapters/openai.py b/libs/langchain/langchain/adapters/openai.py index dad0d4e419dc4..8607468b81d23 100644 --- a/libs/langchain/langchain/adapters/openai.py +++ b/libs/langchain/langchain/adapters/openai.py @@ -13,8 +13,8 @@ overload, ) -from langchain_core.schema.chat import ChatSession -from langchain_core.schema.messages import ( +from langchain_core.chat_sessions import ChatSession +from langchain_core.messages import ( AIMessage, AIMessageChunk, BaseMessage, diff --git a/libs/langchain/langchain/agents/agent.py b/libs/langchain/langchain/agents/agent.py index 905826ccf2f9c..d6dbb7bb8a95f 100644 --- a/libs/langchain/langchain/agents/agent.py +++ b/libs/langchain/langchain/agents/agent.py @@ -19,19 +19,25 @@ ) import yaml -from langchain_core.prompts.few_shot import FewShotPromptTemplate -from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.runnables import Runnable -from langchain_core.schema import ( +from langchain_core.agents import ( AgentAction, AgentFinish, +) +from langchain_core.exceptions import ( + OutputParserException, +) +from langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import BaseMessage +from langchain_core.output_parsers import ( BaseOutputParser, +) +from langchain_core.prompts import ( BasePromptTemplate, - OutputParserException, ) -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import BaseMessage +from langchain_core.prompts.few_shot import FewShotPromptTemplate +from langchain_core.prompts.prompt import PromptTemplate +from langchain_core.pydantic_v1 import BaseModel, root_validator +from langchain_core.runnables import Runnable from langchain_core.utils.input import get_color_mapping from langchain.agents.agent_iterator import AgentExecutorIterator diff --git a/libs/langchain/langchain/agents/agent_iterator.py 
b/libs/langchain/langchain/agents/agent_iterator.py index f0da9972a3130..6a6dccaab89ce 100644 --- a/libs/langchain/langchain/agents/agent_iterator.py +++ b/libs/langchain/langchain/agents/agent_iterator.py @@ -18,8 +18,9 @@ Union, ) +from langchain_core.agents import AgentAction, AgentFinish from langchain_core.load.dump import dumpd -from langchain_core.schema import RUN_KEY, AgentAction, AgentFinish, RunInfo +from langchain_core.outputs import RunInfo from langchain_core.utils.input import get_color_mapping from langchain.callbacks.manager import ( @@ -29,6 +30,7 @@ CallbackManagerForChainRun, Callbacks, ) +from langchain.schema import RUN_KEY from langchain.tools import BaseTool from langchain.utilities.asyncio import asyncio_timeout diff --git a/libs/langchain/langchain/agents/agent_toolkits/conversational_retrieval/openai_functions.py b/libs/langchain/langchain/agents/agent_toolkits/conversational_retrieval/openai_functions.py index f68a0a05817e7..105a6047a14f9 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/conversational_retrieval/openai_functions.py +++ b/libs/langchain/langchain/agents/agent_toolkits/conversational_retrieval/openai_functions.py @@ -1,9 +1,9 @@ from typing import Any, List, Optional +from langchain_core.language_models import BaseLanguageModel +from langchain_core.memory import BaseMemory +from langchain_core.messages import SystemMessage from langchain_core.prompts.chat import MessagesPlaceholder -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.memory import BaseMemory -from langchain_core.schema.messages import SystemMessage from langchain.agents.agent import AgentExecutor from langchain.agents.openai_functions_agent.agent_token_buffer_memory import ( diff --git a/libs/langchain/langchain/agents/agent_toolkits/json/base.py b/libs/langchain/langchain/agents/agent_toolkits/json/base.py index 4d1d7c1cee20d..2d1f5dfe52a21 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/json/base.py +++ b/libs/langchain/langchain/agents/agent_toolkits/json/base.py @@ -1,7 +1,7 @@ """Json agent.""" from typing import Any, Dict, List, Optional -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langchain.agents.agent import AgentExecutor from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX diff --git a/libs/langchain/langchain/agents/agent_toolkits/nla/tool.py b/libs/langchain/langchain/agents/agent_toolkits/nla/tool.py index 232bb673a0b63..427941c6c473b 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/nla/tool.py +++ b/libs/langchain/langchain/agents/agent_toolkits/nla/tool.py @@ -3,7 +3,7 @@ from typing import Any, Optional -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langchain.agents.tools import Tool from langchain.chains.api.openapi.chain import OpenAPIEndpointChain diff --git a/libs/langchain/langchain/agents/agent_toolkits/nla/toolkit.py b/libs/langchain/langchain/agents/agent_toolkits/nla/toolkit.py index 9e868e84c0c7b..1dfcca2271546 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/nla/toolkit.py +++ b/libs/langchain/langchain/agents/agent_toolkits/nla/toolkit.py @@ -2,8 +2,8 @@ from typing import Any, List, Optional, Sequence +from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import Field -from langchain_core.schema.language_model import 
BaseLanguageModel from langchain.agents.agent_toolkits.base import BaseToolkit from langchain.agents.agent_toolkits.nla.tool import NLATool diff --git a/libs/langchain/langchain/agents/agent_toolkits/openapi/base.py b/libs/langchain/langchain/agents/agent_toolkits/openapi/base.py index cb86dcb84d243..5e46f51485d27 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/openapi/base.py +++ b/libs/langchain/langchain/agents/agent_toolkits/openapi/base.py @@ -1,7 +1,7 @@ """OpenAPI spec agent.""" from typing import Any, Dict, List, Optional -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langchain.agents.agent import AgentExecutor from langchain.agents.agent_toolkits.openapi.prompt import ( diff --git a/libs/langchain/langchain/agents/agent_toolkits/openapi/planner.py b/libs/langchain/langchain/agents/agent_toolkits/openapi/planner.py index 4c20c496f6fec..530418e28841a 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/openapi/planner.py +++ b/libs/langchain/langchain/agents/agent_toolkits/openapi/planner.py @@ -5,10 +5,9 @@ from typing import Any, Callable, Dict, List, Optional import yaml -from langchain_core.prompts import PromptTemplate +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate, PromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.agents.agent import AgentExecutor from langchain.agents.agent_toolkits.openapi.planner_prompt import ( diff --git a/libs/langchain/langchain/agents/agent_toolkits/openapi/toolkit.py b/libs/langchain/langchain/agents/agent_toolkits/openapi/toolkit.py index ab5650651b155..b662d14fdf3ba 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/openapi/toolkit.py +++ b/libs/langchain/langchain/agents/agent_toolkits/openapi/toolkit.py @@ -3,7 +3,7 @@ from typing import Any, List -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langchain.agents.agent import AgentExecutor from langchain.agents.agent_toolkits.base import BaseToolkit diff --git a/libs/langchain/langchain/agents/agent_toolkits/powerbi/base.py b/libs/langchain/langchain/agents/agent_toolkits/powerbi/base.py index 2a638e3ca171f..ec512f2a428ae 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/powerbi/base.py +++ b/libs/langchain/langchain/agents/agent_toolkits/powerbi/base.py @@ -1,7 +1,7 @@ """Power BI agent.""" from typing import Any, Dict, List, Optional -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langchain.agents import AgentExecutor from langchain.agents.agent_toolkits.powerbi.prompt import ( diff --git a/libs/langchain/langchain/agents/agent_toolkits/powerbi/toolkit.py b/libs/langchain/langchain/agents/agent_toolkits/powerbi/toolkit.py index ebcff6aa498e0..c9f9bf6263678 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/powerbi/toolkit.py +++ b/libs/langchain/langchain/agents/agent_toolkits/powerbi/toolkit.py @@ -1,6 +1,7 @@ """Toolkit for interacting with a Power BI dataset.""" from typing import List, Optional, Union +from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts import PromptTemplate from langchain_core.prompts.chat import ( ChatPromptTemplate, @@ 
-8,7 +9,6 @@ SystemMessagePromptTemplate, ) from langchain_core.pydantic_v1 import Field -from langchain_core.schema.language_model import BaseLanguageModel from langchain.agents.agent_toolkits.base import BaseToolkit from langchain.callbacks.base import BaseCallbackManager diff --git a/libs/langchain/langchain/agents/agent_toolkits/spark_sql/base.py b/libs/langchain/langchain/agents/agent_toolkits/spark_sql/base.py index 5a0071e87c707..d85d6195d6712 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/spark_sql/base.py +++ b/libs/langchain/langchain/agents/agent_toolkits/spark_sql/base.py @@ -1,7 +1,7 @@ """Spark SQL agent.""" from typing import Any, Dict, List, Optional -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langchain.agents.agent import AgentExecutor from langchain.agents.agent_toolkits.spark_sql.prompt import SQL_PREFIX, SQL_SUFFIX diff --git a/libs/langchain/langchain/agents/agent_toolkits/spark_sql/toolkit.py b/libs/langchain/langchain/agents/agent_toolkits/spark_sql/toolkit.py index 280f38e9eabb5..412d3ba13da44 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/spark_sql/toolkit.py +++ b/libs/langchain/langchain/agents/agent_toolkits/spark_sql/toolkit.py @@ -1,8 +1,8 @@ """Toolkit for interacting with Spark SQL.""" from typing import List +from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import Field -from langchain_core.schema.language_model import BaseLanguageModel from langchain.agents.agent_toolkits.base import BaseToolkit from langchain.tools import BaseTool diff --git a/libs/langchain/langchain/agents/agent_toolkits/sql/base.py b/libs/langchain/langchain/agents/agent_toolkits/sql/base.py index 5d5064f3519a2..8103d68953d38 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/sql/base.py +++ b/libs/langchain/langchain/agents/agent_toolkits/sql/base.py @@ -1,13 +1,13 @@ """SQL agent.""" from typing import Any, Dict, List, Optional, Sequence +from langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import AIMessage, SystemMessage from langchain_core.prompts.chat import ( ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, ) -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import AIMessage, SystemMessage from langchain.agents.agent import AgentExecutor, BaseSingleActionAgent from langchain.agents.agent_toolkits.sql.prompt import ( diff --git a/libs/langchain/langchain/agents/agent_toolkits/sql/toolkit.py b/libs/langchain/langchain/agents/agent_toolkits/sql/toolkit.py index f1a217eaa6823..cf354263d5611 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/sql/toolkit.py +++ b/libs/langchain/langchain/agents/agent_toolkits/sql/toolkit.py @@ -1,8 +1,8 @@ """Toolkit for interacting with an SQL database.""" from typing import List +from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import Field -from langchain_core.schema.language_model import BaseLanguageModel from langchain.agents.agent_toolkits.base import BaseToolkit from langchain.tools import BaseTool diff --git a/libs/langchain/langchain/agents/agent_toolkits/vectorstore/base.py b/libs/langchain/langchain/agents/agent_toolkits/vectorstore/base.py index 66d4f2c0faa67..49e1626c8ad1c 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/vectorstore/base.py +++ 
b/libs/langchain/langchain/agents/agent_toolkits/vectorstore/base.py @@ -1,7 +1,7 @@ """VectorStore agent.""" from typing import Any, Dict, Optional -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langchain.agents.agent import AgentExecutor from langchain.agents.agent_toolkits.vectorstore.prompt import PREFIX, ROUTER_PREFIX diff --git a/libs/langchain/langchain/agents/agent_toolkits/vectorstore/toolkit.py b/libs/langchain/langchain/agents/agent_toolkits/vectorstore/toolkit.py index d820b79a97c2f..1548c3d58aaf5 100644 --- a/libs/langchain/langchain/agents/agent_toolkits/vectorstore/toolkit.py +++ b/libs/langchain/langchain/agents/agent_toolkits/vectorstore/toolkit.py @@ -1,9 +1,9 @@ """Toolkit for interacting with a vector store.""" from typing import List +from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore from langchain.agents.agent_toolkits.base import BaseToolkit from langchain.llms.openai import OpenAI diff --git a/libs/langchain/langchain/agents/chat/base.py b/libs/langchain/langchain/agents/chat/base.py index 399d003ccf43c..b70a55d767273 100644 --- a/libs/langchain/langchain/agents/chat/base.py +++ b/libs/langchain/langchain/agents/chat/base.py @@ -1,13 +1,14 @@ from typing import Any, List, Optional, Sequence, Tuple +from langchain_core.agents import AgentAction +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.prompts.chat import ( ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate, ) from langchain_core.pydantic_v1 import Field -from langchain_core.schema import AgentAction, BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.agents.agent import Agent, AgentOutputParser from langchain.agents.chat.output_parser import ChatOutputParser diff --git a/libs/langchain/langchain/agents/chat/output_parser.py b/libs/langchain/langchain/agents/chat/output_parser.py index 565ab8c519b58..069a7fdbb5cd7 100644 --- a/libs/langchain/langchain/agents/chat/output_parser.py +++ b/libs/langchain/langchain/agents/chat/output_parser.py @@ -2,7 +2,8 @@ import re from typing import Union -from langchain_core.schema import AgentAction, AgentFinish, OutputParserException +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.exceptions import OutputParserException from langchain.agents.agent import AgentOutputParser from langchain.agents.chat.prompt import FORMAT_INSTRUCTIONS diff --git a/libs/langchain/langchain/agents/conversational/base.py b/libs/langchain/langchain/agents/conversational/base.py index cfae34b51df56..9908efe2a903a 100644 --- a/libs/langchain/langchain/agents/conversational/base.py +++ b/libs/langchain/langchain/agents/conversational/base.py @@ -3,9 +3,9 @@ from typing import Any, List, Optional, Sequence +from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts import PromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema.language_model import BaseLanguageModel from langchain.agents.agent import Agent, AgentOutputParser from langchain.agents.agent_types import AgentType diff --git 
a/libs/langchain/langchain/agents/conversational/output_parser.py b/libs/langchain/langchain/agents/conversational/output_parser.py index d57d9922ee347..dacb2173840a4 100644 --- a/libs/langchain/langchain/agents/conversational/output_parser.py +++ b/libs/langchain/langchain/agents/conversational/output_parser.py @@ -1,7 +1,8 @@ import re from typing import Union -from langchain_core.schema import AgentAction, AgentFinish, OutputParserException +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.exceptions import OutputParserException from langchain.agents.agent import AgentOutputParser from langchain.agents.conversational.prompt import FORMAT_INSTRUCTIONS diff --git a/libs/langchain/langchain/agents/conversational_chat/base.py b/libs/langchain/langchain/agents/conversational_chat/base.py index 901f44223f2f9..372f843019700 100644 --- a/libs/langchain/langchain/agents/conversational_chat/base.py +++ b/libs/langchain/langchain/agents/conversational_chat/base.py @@ -3,6 +3,11 @@ from typing import Any, List, Optional, Sequence, Tuple +from langchain_core.agents import AgentAction +from langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.output_parsers import BaseOutputParser +from langchain_core.prompts import BasePromptTemplate from langchain_core.prompts.chat import ( ChatPromptTemplate, HumanMessagePromptTemplate, @@ -10,9 +15,6 @@ SystemMessagePromptTemplate, ) from langchain_core.pydantic_v1 import Field -from langchain_core.schema import AgentAction, BaseOutputParser, BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import AIMessage, BaseMessage, HumanMessage from langchain.agents.agent import Agent, AgentOutputParser from langchain.agents.conversational_chat.output_parser import ConvoOutputParser diff --git a/libs/langchain/langchain/agents/conversational_chat/output_parser.py b/libs/langchain/langchain/agents/conversational_chat/output_parser.py index a18e9e9a91114..4265ee553341a 100644 --- a/libs/langchain/langchain/agents/conversational_chat/output_parser.py +++ b/libs/langchain/langchain/agents/conversational_chat/output_parser.py @@ -2,7 +2,8 @@ from typing import Union -from langchain_core.schema import AgentAction, AgentFinish, OutputParserException +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.exceptions import OutputParserException from langchain.agents import AgentOutputParser from langchain.agents.conversational_chat.prompt import FORMAT_INSTRUCTIONS diff --git a/libs/langchain/langchain/agents/format_scratchpad/log.py b/libs/langchain/langchain/agents/format_scratchpad/log.py index 06eb965bef8e1..742758a8ed75e 100644 --- a/libs/langchain/langchain/agents/format_scratchpad/log.py +++ b/libs/langchain/langchain/agents/format_scratchpad/log.py @@ -1,6 +1,6 @@ from typing import List, Tuple -from langchain_core.schema.agent import AgentAction +from langchain_core.agents import AgentAction def format_log_to_str( diff --git a/libs/langchain/langchain/agents/format_scratchpad/log_to_messages.py b/libs/langchain/langchain/agents/format_scratchpad/log_to_messages.py index bf39e75ba11e2..1329632b0c214 100644 --- a/libs/langchain/langchain/agents/format_scratchpad/log_to_messages.py +++ b/libs/langchain/langchain/agents/format_scratchpad/log_to_messages.py @@ -1,7 +1,7 @@ from typing import List, Tuple -from langchain_core.schema.agent import 
AgentAction -from langchain_core.schema.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.agents import AgentAction +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage def format_log_to_messages( diff --git a/libs/langchain/langchain/agents/format_scratchpad/openai_functions.py b/libs/langchain/langchain/agents/format_scratchpad/openai_functions.py index 16aa3a23db164..b5f09ea249927 100644 --- a/libs/langchain/langchain/agents/format_scratchpad/openai_functions.py +++ b/libs/langchain/langchain/agents/format_scratchpad/openai_functions.py @@ -1,8 +1,8 @@ import json from typing import List, Sequence, Tuple -from langchain_core.schema.agent import AgentAction, AgentActionMessageLog -from langchain_core.schema.messages import AIMessage, BaseMessage, FunctionMessage +from langchain_core.agents import AgentAction, AgentActionMessageLog +from langchain_core.messages import AIMessage, BaseMessage, FunctionMessage def _convert_agent_action_to_messages( diff --git a/libs/langchain/langchain/agents/format_scratchpad/openai_tools.py b/libs/langchain/langchain/agents/format_scratchpad/openai_tools.py index b2f490c70ccf4..d9717dc803eed 100644 --- a/libs/langchain/langchain/agents/format_scratchpad/openai_tools.py +++ b/libs/langchain/langchain/agents/format_scratchpad/openai_tools.py @@ -1,8 +1,8 @@ import json from typing import List, Sequence, Tuple -from langchain_core.schema.agent import AgentAction -from langchain_core.schema.messages import ( +from langchain_core.agents import AgentAction +from langchain_core.messages import ( AIMessage, BaseMessage, ToolMessage, diff --git a/libs/langchain/langchain/agents/format_scratchpad/xml.py b/libs/langchain/langchain/agents/format_scratchpad/xml.py index a7db742c86638..e0ea960c14f40 100644 --- a/libs/langchain/langchain/agents/format_scratchpad/xml.py +++ b/libs/langchain/langchain/agents/format_scratchpad/xml.py @@ -1,6 +1,6 @@ from typing import List, Tuple -from langchain_core.schema.agent import AgentAction +from langchain_core.agents import AgentAction def format_xml( diff --git a/libs/langchain/langchain/agents/initialize.py b/libs/langchain/langchain/agents/initialize.py index c114d5733cc9c..dc7402f9220b7 100644 --- a/libs/langchain/langchain/agents/initialize.py +++ b/libs/langchain/langchain/agents/initialize.py @@ -1,7 +1,7 @@ """Load agent.""" from typing import Any, Optional, Sequence -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langchain.agents.agent import AgentExecutor from langchain.agents.agent_types import AgentType diff --git a/libs/langchain/langchain/agents/load_tools.py b/libs/langchain/langchain/agents/load_tools.py index 2e7254343a3ed..a60053f06e66d 100644 --- a/libs/langchain/langchain/agents/load_tools.py +++ b/libs/langchain/langchain/agents/load_tools.py @@ -19,7 +19,7 @@ from mypy_extensions import Arg, KwArg from langchain.agents.tools import Tool -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langchain.callbacks.base import BaseCallbackManager from langchain.callbacks.manager import Callbacks from langchain.chains.api import news_docs, open_meteo_docs, podcast_docs, tmdb_docs diff --git a/libs/langchain/langchain/agents/loading.py b/libs/langchain/langchain/agents/loading.py index 8915b45dea0dd..b3bf515787628 100644 --- a/libs/langchain/langchain/agents/loading.py +++ 
b/libs/langchain/langchain/agents/loading.py @@ -5,7 +5,7 @@ from typing import Any, List, Optional, Union import yaml -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langchain.agents.agent import BaseMultiActionAgent, BaseSingleActionAgent from langchain.agents.tools import Tool diff --git a/libs/langchain/langchain/agents/mrkl/base.py b/libs/langchain/langchain/agents/mrkl/base.py index 7fef9acdf4ffb..80c51552282d4 100644 --- a/libs/langchain/langchain/agents/mrkl/base.py +++ b/libs/langchain/langchain/agents/mrkl/base.py @@ -3,9 +3,9 @@ from typing import Any, Callable, List, NamedTuple, Optional, Sequence +from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts import PromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema.language_model import BaseLanguageModel from langchain.agents.agent import Agent, AgentExecutor, AgentOutputParser from langchain.agents.agent_types import AgentType diff --git a/libs/langchain/langchain/agents/mrkl/output_parser.py b/libs/langchain/langchain/agents/mrkl/output_parser.py index 058ab512b35c9..b716c49db4808 100644 --- a/libs/langchain/langchain/agents/mrkl/output_parser.py +++ b/libs/langchain/langchain/agents/mrkl/output_parser.py @@ -1,7 +1,8 @@ import re from typing import Union -from langchain_core.schema import AgentAction, AgentFinish, OutputParserException +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.exceptions import OutputParserException from langchain.agents.agent import AgentOutputParser from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS diff --git a/libs/langchain/langchain/agents/openai_assistant/base.py b/libs/langchain/langchain/agents/openai_assistant/base.py index 19edcacec904a..7939542858909 100644 --- a/libs/langchain/langchain/agents/openai_assistant/base.py +++ b/libs/langchain/langchain/agents/openai_assistant/base.py @@ -5,10 +5,10 @@ from time import sleep from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union +from langchain_core.agents import AgentAction, AgentFinish from langchain_core.load import dumpd from langchain_core.pydantic_v1 import Field from langchain_core.runnables import RunnableConfig, RunnableSerializable -from langchain_core.schema.agent import AgentAction, AgentFinish from langchain.callbacks.manager import CallbackManager from langchain.tools.base import BaseTool @@ -103,7 +103,7 @@ class OpenAIAssistantRunnable(RunnableSerializable[Dict, OutputType]): from langchain_experimental.openai_assistant import OpenAIAssistantRunnable from langchain.agents import AgentExecutor - from langchain_core.schema.agent import AgentFinish + from langchain_core.agents import AgentFinish from langchain.tools import E2BDataAnalysisTool diff --git a/libs/langchain/langchain/agents/openai_functions_agent/agent_token_buffer_memory.py b/libs/langchain/langchain/agents/openai_functions_agent/agent_token_buffer_memory.py index a94e1555f0996..fe976ddb28643 100644 --- a/libs/langchain/langchain/agents/openai_functions_agent/agent_token_buffer_memory.py +++ b/libs/langchain/langchain/agents/openai_functions_agent/agent_token_buffer_memory.py @@ -1,8 +1,8 @@ """Memory used to save agent output AND intermediate steps.""" from typing import Any, Dict, List -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import BaseMessage, get_buffer_string +from 
langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import BaseMessage, get_buffer_string from langchain.agents.format_scratchpad.openai_functions import ( format_to_openai_function_messages, diff --git a/libs/langchain/langchain/agents/openai_functions_agent/base.py b/libs/langchain/langchain/agents/openai_functions_agent/base.py index cfc0100831b46..9045d836c8feb 100644 --- a/libs/langchain/langchain/agents/openai_functions_agent/base.py +++ b/libs/langchain/langchain/agents/openai_functions_agent/base.py @@ -1,6 +1,13 @@ """Module implements an agent that uses OpenAI's APIs function enabled API.""" from typing import Any, List, Optional, Sequence, Tuple, Union +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import ( + BaseMessage, + SystemMessage, +) +from langchain_core.prompts import BasePromptTemplate from langchain_core.prompts.chat import ( BaseMessagePromptTemplate, ChatPromptTemplate, @@ -8,16 +15,6 @@ MessagesPlaceholder, ) from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import ( - AgentAction, - AgentFinish, - BasePromptTemplate, -) -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import ( - BaseMessage, - SystemMessage, -) from langchain.agents import BaseSingleActionAgent from langchain.agents.format_scratchpad.openai_functions import ( diff --git a/libs/langchain/langchain/agents/openai_functions_multi_agent/base.py b/libs/langchain/langchain/agents/openai_functions_multi_agent/base.py index 2523ce44e2c01..8b8d1da9f095b 100644 --- a/libs/langchain/langchain/agents/openai_functions_multi_agent/base.py +++ b/libs/langchain/langchain/agents/openai_functions_multi_agent/base.py @@ -3,6 +3,15 @@ from json import JSONDecodeError from typing import Any, List, Optional, Sequence, Tuple, Union +from langchain_core.agents import AgentAction, AgentActionMessageLog, AgentFinish +from langchain_core.exceptions import OutputParserException +from langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import ( + AIMessage, + BaseMessage, + SystemMessage, +) +from langchain_core.prompts import BasePromptTemplate from langchain_core.prompts.chat import ( BaseMessagePromptTemplate, ChatPromptTemplate, @@ -10,19 +19,6 @@ MessagesPlaceholder, ) from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import ( - AgentAction, - AgentFinish, - BasePromptTemplate, - OutputParserException, -) -from langchain_core.schema.agent import AgentActionMessageLog -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import ( - AIMessage, - BaseMessage, - SystemMessage, -) from langchain.agents import BaseMultiActionAgent from langchain.agents.format_scratchpad.openai_functions import ( diff --git a/libs/langchain/langchain/agents/output_parsers/json.py b/libs/langchain/langchain/agents/output_parsers/json.py index 739dd2482c1ff..5fa543ea9dfea 100644 --- a/libs/langchain/langchain/agents/output_parsers/json.py +++ b/libs/langchain/langchain/agents/output_parsers/json.py @@ -3,7 +3,8 @@ import logging from typing import Union -from langchain_core.schema import AgentAction, AgentFinish, OutputParserException +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.exceptions import OutputParserException from langchain.agents.agent import AgentOutputParser from 
langchain.output_parsers.json import parse_json_markdown diff --git a/libs/langchain/langchain/agents/output_parsers/openai_functions.py b/libs/langchain/langchain/agents/output_parsers/openai_functions.py index ee83eb337b453..38f604723d4a2 100644 --- a/libs/langchain/langchain/agents/output_parsers/openai_functions.py +++ b/libs/langchain/langchain/agents/output_parsers/openai_functions.py @@ -3,17 +3,13 @@ from json import JSONDecodeError from typing import List, Union -from langchain_core.schema import ( - AgentAction, - AgentFinish, - OutputParserException, -) -from langchain_core.schema.agent import AgentActionMessageLog -from langchain_core.schema.messages import ( +from langchain_core.agents import AgentAction, AgentActionMessageLog, AgentFinish +from langchain_core.exceptions import OutputParserException +from langchain_core.messages import ( AIMessage, BaseMessage, ) -from langchain_core.schema.output import ChatGeneration, Generation +from langchain_core.outputs import ChatGeneration, Generation from langchain.agents.agent import AgentOutputParser diff --git a/libs/langchain/langchain/agents/output_parsers/openai_tools.py b/libs/langchain/langchain/agents/output_parsers/openai_tools.py index a92d6f6b6ca94..545d676a12dfb 100644 --- a/libs/langchain/langchain/agents/output_parsers/openai_tools.py +++ b/libs/langchain/langchain/agents/output_parsers/openai_tools.py @@ -3,17 +3,13 @@ from json import JSONDecodeError from typing import List, Union -from langchain_core.schema import ( - AgentAction, - AgentFinish, - OutputParserException, -) -from langchain_core.schema.agent import AgentActionMessageLog -from langchain_core.schema.messages import ( +from langchain_core.agents import AgentAction, AgentActionMessageLog, AgentFinish +from langchain_core.exceptions import OutputParserException +from langchain_core.messages import ( AIMessage, BaseMessage, ) -from langchain_core.schema.output import ChatGeneration, Generation +from langchain_core.outputs import ChatGeneration, Generation from langchain.agents.agent import MultiActionAgentOutputParser diff --git a/libs/langchain/langchain/agents/output_parsers/react_json_single_input.py b/libs/langchain/langchain/agents/output_parsers/react_json_single_input.py index a281388d7089e..c58e1804ab171 100644 --- a/libs/langchain/langchain/agents/output_parsers/react_json_single_input.py +++ b/libs/langchain/langchain/agents/output_parsers/react_json_single_input.py @@ -2,7 +2,8 @@ import re from typing import Union -from langchain_core.schema import AgentAction, AgentFinish, OutputParserException +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.exceptions import OutputParserException from langchain.agents.agent import AgentOutputParser from langchain.agents.chat.prompt import FORMAT_INSTRUCTIONS diff --git a/libs/langchain/langchain/agents/output_parsers/react_single_input.py b/libs/langchain/langchain/agents/output_parsers/react_single_input.py index 9c201d98c2710..f63821588fe5d 100644 --- a/libs/langchain/langchain/agents/output_parsers/react_single_input.py +++ b/libs/langchain/langchain/agents/output_parsers/react_single_input.py @@ -1,7 +1,8 @@ import re from typing import Union -from langchain_core.schema import AgentAction, AgentFinish, OutputParserException +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.exceptions import OutputParserException from langchain.agents.agent import AgentOutputParser from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS diff --git 
a/libs/langchain/langchain/agents/output_parsers/self_ask.py b/libs/langchain/langchain/agents/output_parsers/self_ask.py index ae665a58a2626..e658703f76367 100644 --- a/libs/langchain/langchain/agents/output_parsers/self_ask.py +++ b/libs/langchain/langchain/agents/output_parsers/self_ask.py @@ -1,6 +1,7 @@ from typing import Sequence, Union -from langchain_core.schema import AgentAction, AgentFinish, OutputParserException +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.exceptions import OutputParserException from langchain.agents.agent import AgentOutputParser diff --git a/libs/langchain/langchain/agents/output_parsers/xml.py b/libs/langchain/langchain/agents/output_parsers/xml.py index 15f0916db7a93..730d069ae6a0f 100644 --- a/libs/langchain/langchain/agents/output_parsers/xml.py +++ b/libs/langchain/langchain/agents/output_parsers/xml.py @@ -1,6 +1,6 @@ from typing import Union -from langchain_core.schema import AgentAction, AgentFinish +from langchain_core.agents import AgentAction, AgentFinish from langchain.agents import AgentOutputParser diff --git a/libs/langchain/langchain/agents/react/base.py b/libs/langchain/langchain/agents/react/base.py index 34fad1f6e6df6..3cb5ba203f3c5 100644 --- a/libs/langchain/langchain/agents/react/base.py +++ b/libs/langchain/langchain/agents/react/base.py @@ -1,9 +1,9 @@ """Chain that implements the ReAct paper from https://arxiv.org/pdf/2210.03629.pdf.""" from typing import Any, List, Optional, Sequence +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.agents.agent import Agent, AgentExecutor, AgentOutputParser from langchain.agents.agent_types import AgentType diff --git a/libs/langchain/langchain/agents/react/output_parser.py b/libs/langchain/langchain/agents/react/output_parser.py index fe685aeb7bcfe..ce3161d630713 100644 --- a/libs/langchain/langchain/agents/react/output_parser.py +++ b/libs/langchain/langchain/agents/react/output_parser.py @@ -1,7 +1,8 @@ import re from typing import Union -from langchain_core.schema import AgentAction, AgentFinish, OutputParserException +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.exceptions import OutputParserException from langchain.agents.agent import AgentOutputParser diff --git a/libs/langchain/langchain/agents/schema.py b/libs/langchain/langchain/agents/schema.py index ba730f48034b5..6c16cad9e6dd6 100644 --- a/libs/langchain/langchain/agents/schema.py +++ b/libs/langchain/langchain/agents/schema.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Tuple +from langchain_core.agents import AgentAction from langchain_core.prompts.chat import ChatPromptTemplate -from langchain_core.schema import AgentAction class AgentScratchPadChatPromptTemplate(ChatPromptTemplate): diff --git a/libs/langchain/langchain/agents/self_ask_with_search/base.py b/libs/langchain/langchain/agents/self_ask_with_search/base.py index ce2e258174891..5a4109a7cf37b 100644 --- a/libs/langchain/langchain/agents/self_ask_with_search/base.py +++ b/libs/langchain/langchain/agents/self_ask_with_search/base.py @@ -1,9 +1,9 @@ """Chain that does self-ask with search.""" from typing import Any, Sequence, Union +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from 
langchain_core.pydantic_v1 import Field -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.agents.agent import Agent, AgentExecutor, AgentOutputParser from langchain.agents.agent_types import AgentType diff --git a/libs/langchain/langchain/agents/structured_chat/base.py b/libs/langchain/langchain/agents/structured_chat/base.py index 0f1158b464908..8a05e93eadf0c 100644 --- a/libs/langchain/langchain/agents/structured_chat/base.py +++ b/libs/langchain/langchain/agents/structured_chat/base.py @@ -1,14 +1,15 @@ import re from typing import Any, List, Optional, Sequence, Tuple +from langchain_core.agents import AgentAction +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.prompts.chat import ( ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate, ) from langchain_core.pydantic_v1 import Field -from langchain_core.schema import AgentAction, BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.agents.agent import Agent, AgentOutputParser from langchain.agents.structured_chat.output_parser import ( diff --git a/libs/langchain/langchain/agents/structured_chat/output_parser.py b/libs/langchain/langchain/agents/structured_chat/output_parser.py index 2a961d50fee7f..eb85d6684b6e8 100644 --- a/libs/langchain/langchain/agents/structured_chat/output_parser.py +++ b/libs/langchain/langchain/agents/structured_chat/output_parser.py @@ -5,9 +5,10 @@ import re from typing import Optional, Union +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.exceptions import OutputParserException +from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import Field -from langchain_core.schema import AgentAction, AgentFinish, OutputParserException -from langchain_core.schema.language_model import BaseLanguageModel from langchain.agents.agent import AgentOutputParser from langchain.agents.structured_chat.prompt import FORMAT_INSTRUCTIONS diff --git a/libs/langchain/langchain/agents/xml/base.py b/libs/langchain/langchain/agents/xml/base.py index c3b6966f38d5d..5f694402f52c2 100644 --- a/libs/langchain/langchain/agents/xml/base.py +++ b/libs/langchain/langchain/agents/xml/base.py @@ -1,7 +1,7 @@ from typing import Any, List, Tuple, Union +from langchain_core.agents import AgentAction, AgentFinish from langchain_core.prompts.chat import AIMessagePromptTemplate, ChatPromptTemplate -from langchain_core.schema import AgentAction, AgentFinish from langchain.agents.agent import BaseSingleActionAgent from langchain.agents.output_parsers.xml import XMLAgentOutputParser diff --git a/libs/langchain/langchain/base_language.py b/libs/langchain/langchain/base_language.py index 9912c1a95f399..37d1572d19812 100644 --- a/libs/langchain/langchain/base_language.py +++ b/libs/langchain/langchain/base_language.py @@ -1,6 +1,6 @@ """Deprecated module for BaseLanguageModel class, kept for backwards compatibility.""" from __future__ import annotations -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel __all__ = ["BaseLanguageModel"] diff --git a/libs/langchain/langchain/cache.py b/libs/langchain/langchain/cache.py index 46197c119a62a..33bea55247d2c 100644 --- a/libs/langchain/langchain/cache.py +++ b/libs/langchain/langchain/cache.py @@ -51,11 +51,11 @@ except 
ImportError: from sqlalchemy.ext.declarative import declarative_base +from langchain_core.caches import RETURN_VAL_TYPE, BaseCache +from langchain_core.embeddings import Embeddings from langchain_core.load.dump import dumps from langchain_core.load.load import loads -from langchain_core.schema import ChatGeneration, Generation -from langchain_core.schema.cache import RETURN_VAL_TYPE, BaseCache -from langchain_core.schema.embeddings import Embeddings +from langchain_core.outputs import ChatGeneration, Generation from langchain.llms.base import LLM, get_prompts from langchain.utils import get_from_env diff --git a/libs/langchain/langchain/callbacks/__init__.py b/libs/langchain/langchain/callbacks/__init__.py index c6957bd678eaf..5bd09a2ea007b 100644 --- a/libs/langchain/langchain/callbacks/__init__.py +++ b/libs/langchain/langchain/callbacks/__init__.py @@ -7,6 +7,8 @@ BaseCallbackHandler --> CallbackHandler # Example: AimCallbackHandler """ +from langchain_core.tracers.langchain import LangChainTracer + from langchain.callbacks.aim_callback import AimCallbackHandler from langchain.callbacks.argilla_callback import ArgillaCallbackHandler from langchain.callbacks.arize_callback import ArizeCallbackHandler @@ -38,7 +40,6 @@ FinalStreamingStdOutCallbackHandler, ) from langchain.callbacks.streamlit import LLMThoughtLabeler, StreamlitCallbackHandler -from langchain.callbacks.tracers.langchain import LangChainTracer from langchain.callbacks.trubrics_callback import TrubricsCallbackHandler from langchain.callbacks.wandb_callback import WandbCallbackHandler from langchain.callbacks.whylabs_callback import WhyLabsCallbackHandler diff --git a/libs/langchain/langchain/callbacks/aim_callback.py b/libs/langchain/langchain/callbacks/aim_callback.py index e43f44ea90cef..06e3d52d72f33 100644 --- a/libs/langchain/langchain/callbacks/aim_callback.py +++ b/libs/langchain/langchain/callbacks/aim_callback.py @@ -1,7 +1,8 @@ from copy import deepcopy from typing import Any, Dict, List, Optional -from langchain_core.schema import AgentAction, AgentFinish, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.outputs import LLMResult from langchain.callbacks.base import BaseCallbackHandler diff --git a/libs/langchain/langchain/callbacks/argilla_callback.py b/libs/langchain/langchain/callbacks/argilla_callback.py index e02ae12cc5ff8..7934b36fb1404 100644 --- a/libs/langchain/langchain/callbacks/argilla_callback.py +++ b/libs/langchain/langchain/callbacks/argilla_callback.py @@ -2,7 +2,8 @@ import warnings from typing import Any, Dict, List, Optional -from langchain_core.schema import AgentAction, AgentFinish, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.outputs import LLMResult from packaging.version import parse from langchain.callbacks.base import BaseCallbackHandler diff --git a/libs/langchain/langchain/callbacks/arize_callback.py b/libs/langchain/langchain/callbacks/arize_callback.py index 191e9a1dfa747..c0c81ce330872 100644 --- a/libs/langchain/langchain/callbacks/arize_callback.py +++ b/libs/langchain/langchain/callbacks/arize_callback.py @@ -1,7 +1,8 @@ from datetime import datetime from typing import Any, Dict, List, Optional -from langchain_core.schema import AgentAction, AgentFinish, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.outputs import LLMResult from langchain.callbacks.base import BaseCallbackHandler from langchain.callbacks.utils import import_pandas diff --git 
a/libs/langchain/langchain/callbacks/arthur_callback.py b/libs/langchain/langchain/callbacks/arthur_callback.py index f1d5d39b601e8..cd01c7263fc81 100644 --- a/libs/langchain/langchain/callbacks/arthur_callback.py +++ b/libs/langchain/langchain/callbacks/arthur_callback.py @@ -9,7 +9,8 @@ from typing import TYPE_CHECKING, Any, DefaultDict, Dict, List, Optional import numpy as np -from langchain_core.schema import AgentAction, AgentFinish, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.outputs import LLMResult from langchain.callbacks.base import BaseCallbackHandler diff --git a/libs/langchain/langchain/callbacks/clearml_callback.py b/libs/langchain/langchain/callbacks/clearml_callback.py index d3ea80ff4c856..fd3899e804395 100644 --- a/libs/langchain/langchain/callbacks/clearml_callback.py +++ b/libs/langchain/langchain/callbacks/clearml_callback.py @@ -5,7 +5,8 @@ from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Sequence -from langchain_core.schema import AgentAction, AgentFinish, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.outputs import LLMResult from langchain.callbacks.base import BaseCallbackHandler from langchain.callbacks.utils import ( diff --git a/libs/langchain/langchain/callbacks/comet_ml_callback.py b/libs/langchain/langchain/callbacks/comet_ml_callback.py index d38466524e061..fc28474e1ca40 100644 --- a/libs/langchain/langchain/callbacks/comet_ml_callback.py +++ b/libs/langchain/langchain/callbacks/comet_ml_callback.py @@ -3,7 +3,8 @@ from pathlib import Path from typing import Any, Callable, Dict, List, Optional, Sequence -from langchain_core.schema import AgentAction, AgentFinish, Generation, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.outputs import Generation, LLMResult import langchain from langchain.callbacks.base import BaseCallbackHandler diff --git a/libs/langchain/langchain/callbacks/confident_callback.py b/libs/langchain/langchain/callbacks/confident_callback.py index 3cba3de313cb0..6d0dbf89248c4 100644 --- a/libs/langchain/langchain/callbacks/confident_callback.py +++ b/libs/langchain/langchain/callbacks/confident_callback.py @@ -4,7 +4,8 @@ from typing import Any, Dict, List, Optional, Union from langchain.callbacks.base import BaseCallbackHandler -from langchain_core.schema import AgentAction, AgentFinish, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.outputs import LLMResult class DeepEvalCallbackHandler(BaseCallbackHandler): diff --git a/libs/langchain/langchain/callbacks/context_callback.py b/libs/langchain/langchain/callbacks/context_callback.py index 550341ab116b8..e1e87ea9cbf43 100644 --- a/libs/langchain/langchain/callbacks/context_callback.py +++ b/libs/langchain/langchain/callbacks/context_callback.py @@ -3,10 +3,8 @@ from typing import Any, Dict, List from uuid import UUID -from langchain_core.schema import ( - BaseMessage, - LLMResult, -) +from langchain_core.messages import BaseMessage +from langchain_core.outputs import LLMResult from langchain.callbacks.base import BaseCallbackHandler diff --git a/libs/langchain/langchain/callbacks/file.py b/libs/langchain/langchain/callbacks/file.py index 8e386c96be710..9768a9f031604 100644 --- a/libs/langchain/langchain/callbacks/file.py +++ b/libs/langchain/langchain/callbacks/file.py @@ -1,7 +1,7 @@ """Callback Handler that writes to a file.""" from typing import Any, Dict, Optional, 
TextIO, cast -from langchain_core.schema import AgentAction, AgentFinish +from langchain_core.agents import AgentAction, AgentFinish from langchain_core.utils.input import print_text from langchain.callbacks.base import BaseCallbackHandler diff --git a/libs/langchain/langchain/callbacks/flyte_callback.py b/libs/langchain/langchain/callbacks/flyte_callback.py index c34ea63d9239b..a09603b693c38 100644 --- a/libs/langchain/langchain/callbacks/flyte_callback.py +++ b/libs/langchain/langchain/callbacks/flyte_callback.py @@ -5,7 +5,8 @@ from copy import deepcopy from typing import TYPE_CHECKING, Any, Dict, List, Tuple -from langchain_core.schema import AgentAction, AgentFinish, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.outputs import LLMResult from langchain.callbacks.base import BaseCallbackHandler from langchain.callbacks.utils import ( diff --git a/libs/langchain/langchain/callbacks/infino_callback.py b/libs/langchain/langchain/callbacks/infino_callback.py index 815fa3d22599a..926f381982000 100644 --- a/libs/langchain/langchain/callbacks/infino_callback.py +++ b/libs/langchain/langchain/callbacks/infino_callback.py @@ -1,8 +1,9 @@ import time from typing import Any, Dict, List, Optional, cast -from langchain_core.schema import AgentAction, AgentFinish, LLMResult -from langchain_core.schema.messages import BaseMessage +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.messages import BaseMessage +from langchain_core.outputs import LLMResult from langchain.callbacks.base import BaseCallbackHandler diff --git a/libs/langchain/langchain/callbacks/labelstudio_callback.py b/libs/langchain/langchain/callbacks/labelstudio_callback.py index 303a4315bdc4f..e651be068887f 100644 --- a/libs/langchain/langchain/callbacks/labelstudio_callback.py +++ b/libs/langchain/langchain/callbacks/labelstudio_callback.py @@ -5,14 +5,9 @@ from typing import Any, Dict, List, Optional, Tuple, Union from uuid import UUID -from langchain_core.schema import ( - AgentAction, - AgentFinish, - BaseMessage, - ChatMessage, - Generation, - LLMResult, -) +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.messages import BaseMessage, ChatMessage +from langchain_core.outputs import Generation, LLMResult from langchain.callbacks.base import BaseCallbackHandler diff --git a/libs/langchain/langchain/callbacks/llmonitor_callback.py b/libs/langchain/langchain/callbacks/llmonitor_callback.py index 8202affa9da1a..e32ee834223ec 100644 --- a/libs/langchain/langchain/callbacks/llmonitor_callback.py +++ b/libs/langchain/langchain/callbacks/llmonitor_callback.py @@ -8,9 +8,9 @@ from uuid import UUID import requests -from langchain_core.schema.agent import AgentAction, AgentFinish -from langchain_core.schema.messages import BaseMessage -from langchain_core.schema.output import LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.messages import BaseMessage +from langchain_core.outputs import LLMResult from packaging.version import parse from langchain.callbacks.base import BaseCallbackHandler diff --git a/libs/langchain/langchain/callbacks/mlflow_callback.py b/libs/langchain/langchain/callbacks/mlflow_callback.py index 881d674547068..c8762daece09e 100644 --- a/libs/langchain/langchain/callbacks/mlflow_callback.py +++ b/libs/langchain/langchain/callbacks/mlflow_callback.py @@ -7,7 +7,8 @@ from pathlib import Path from typing import Any, Dict, List, Optional, Union -from langchain_core.schema import 
AgentAction, AgentFinish, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.outputs import LLMResult from langchain.callbacks.base import BaseCallbackHandler from langchain.callbacks.utils import ( diff --git a/libs/langchain/langchain/callbacks/openai_info.py b/libs/langchain/langchain/callbacks/openai_info.py index 19d40b949576e..a94c890ae80f5 100644 --- a/libs/langchain/langchain/callbacks/openai_info.py +++ b/libs/langchain/langchain/callbacks/openai_info.py @@ -1,7 +1,7 @@ """Callback Handler that prints to std out.""" from typing import Any, Dict, List -from langchain_core.schema import LLMResult +from langchain_core.outputs import LLMResult from langchain.callbacks.base import BaseCallbackHandler diff --git a/libs/langchain/langchain/callbacks/promptlayer_callback.py b/libs/langchain/langchain/callbacks/promptlayer_callback.py index 567734a7c12bd..2581293c19180 100644 --- a/libs/langchain/langchain/callbacks/promptlayer_callback.py +++ b/libs/langchain/langchain/callbacks/promptlayer_callback.py @@ -5,17 +5,17 @@ from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple from uuid import UUID -from langchain_core.schema import ( - ChatGeneration, - LLMResult, -) -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, BaseMessage, ChatMessage, HumanMessage, SystemMessage, ) +from langchain_core.outputs import ( + ChatGeneration, + LLMResult, +) from langchain.callbacks.base import BaseCallbackHandler diff --git a/libs/langchain/langchain/callbacks/sagemaker_callback.py b/libs/langchain/langchain/callbacks/sagemaker_callback.py index 913044fc56af3..727c534493d57 100644 --- a/libs/langchain/langchain/callbacks/sagemaker_callback.py +++ b/libs/langchain/langchain/callbacks/sagemaker_callback.py @@ -5,7 +5,8 @@ from copy import deepcopy from typing import Any, Dict, List, Optional -from langchain_core.schema import AgentAction, AgentFinish, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.outputs import LLMResult from langchain.callbacks.base import BaseCallbackHandler from langchain.callbacks.utils import ( diff --git a/libs/langchain/langchain/callbacks/streaming_aiter.py b/libs/langchain/langchain/callbacks/streaming_aiter.py index 46dd77b6d5c83..92218af87bbc2 100644 --- a/libs/langchain/langchain/callbacks/streaming_aiter.py +++ b/libs/langchain/langchain/callbacks/streaming_aiter.py @@ -3,7 +3,7 @@ import asyncio from typing import Any, AsyncIterator, Dict, List, Literal, Union, cast -from langchain_core.schema.output import LLMResult +from langchain_core.outputs import LLMResult from langchain.callbacks.base import AsyncCallbackHandler diff --git a/libs/langchain/langchain/callbacks/streaming_aiter_final_only.py b/libs/langchain/langchain/callbacks/streaming_aiter_final_only.py index 8e93c0e116747..3cb8623b4a60b 100644 --- a/libs/langchain/langchain/callbacks/streaming_aiter_final_only.py +++ b/libs/langchain/langchain/callbacks/streaming_aiter_final_only.py @@ -2,7 +2,7 @@ from typing import Any, Dict, List, Optional -from langchain_core.schema import LLMResult +from langchain_core.outputs import LLMResult from langchain.callbacks.streaming_aiter import AsyncIteratorCallbackHandler diff --git a/libs/langchain/langchain/callbacks/streamlit/streamlit_callback_handler.py b/libs/langchain/langchain/callbacks/streamlit/streamlit_callback_handler.py index b8c89eb7e5f02..ef01aa0fb3e16 100644 --- 
a/libs/langchain/langchain/callbacks/streamlit/streamlit_callback_handler.py +++ b/libs/langchain/langchain/callbacks/streamlit/streamlit_callback_handler.py @@ -5,7 +5,8 @@ from enum import Enum from typing import TYPE_CHECKING, Any, Dict, List, NamedTuple, Optional -from langchain_core.schema import AgentAction, AgentFinish, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.outputs import LLMResult from langchain.callbacks.base import BaseCallbackHandler from langchain.callbacks.streamlit.mutable_expander import MutableExpander diff --git a/libs/langchain/langchain/callbacks/tracers/__init__.py b/libs/langchain/langchain/callbacks/tracers/__init__.py index 7ff9ca7f3cc43..dae2ac520a1b8 100644 --- a/libs/langchain/langchain/callbacks/tracers/__init__.py +++ b/libs/langchain/langchain/callbacks/tracers/__init__.py @@ -1,8 +1,8 @@ """Tracers that record execution of LangChain runs.""" -from langchain_core.callbacks.tracers.langchain import LangChainTracer -from langchain_core.callbacks.tracers.langchain_v1 import LangChainTracerV1 -from langchain_core.callbacks.tracers.stdout import ( +from langchain_core.tracers.langchain import LangChainTracer +from langchain_core.tracers.langchain_v1 import LangChainTracerV1 +from langchain_core.tracers.stdout import ( ConsoleCallbackHandler, FunctionCallbackHandler, ) diff --git a/libs/langchain/langchain/callbacks/tracers/base.py b/libs/langchain/langchain/callbacks/tracers/base.py index e2628a9ba1837..1a56fa66688a3 100644 --- a/libs/langchain/langchain/callbacks/tracers/base.py +++ b/libs/langchain/langchain/callbacks/tracers/base.py @@ -1,5 +1,5 @@ """Base interfaces for tracing runs.""" -from langchain_core.callbacks.tracers.base import BaseTracer, TracerException +from langchain_core.tracers.base import BaseTracer, TracerException __all__ = ["BaseTracer", "TracerException"] diff --git a/libs/langchain/langchain/callbacks/tracers/evaluation.py b/libs/langchain/langchain/callbacks/tracers/evaluation.py index 0ee0dbe1b49b0..1617b825a21dc 100644 --- a/libs/langchain/langchain/callbacks/tracers/evaluation.py +++ b/libs/langchain/langchain/callbacks/tracers/evaluation.py @@ -1,5 +1,5 @@ """A tracer that runs evaluators over completed runs.""" -from langchain_core.callbacks.tracers.evaluation import ( +from langchain_core.tracers.evaluation import ( EvaluatorCallbackHandler, wait_for_all_evaluators, ) diff --git a/libs/langchain/langchain/callbacks/tracers/langchain.py b/libs/langchain/langchain/callbacks/tracers/langchain.py index 031b1244f54b4..54ae9dc6cdda4 100644 --- a/libs/langchain/langchain/callbacks/tracers/langchain.py +++ b/libs/langchain/langchain/callbacks/tracers/langchain.py @@ -1,6 +1,6 @@ """A Tracer implementation that records to LangChain endpoint.""" -from langchain_core.callbacks.tracers.langchain import ( +from langchain_core.tracers.langchain import ( LangChainTracer, wait_for_all_tracers, ) diff --git a/libs/langchain/langchain/callbacks/tracers/langchain_v1.py b/libs/langchain/langchain/callbacks/tracers/langchain_v1.py index 7c426f3945e27..a12b47401f75a 100644 --- a/libs/langchain/langchain/callbacks/tracers/langchain_v1.py +++ b/libs/langchain/langchain/callbacks/tracers/langchain_v1.py @@ -1,3 +1,3 @@ -from langchain_core.callbacks.tracers.langchain_v1 import LangChainTracerV1 +from langchain_core.tracers.langchain_v1 import LangChainTracerV1 __all__ = ["LangChainTracerV1"] diff --git a/libs/langchain/langchain/callbacks/tracers/log_stream.py 
b/libs/langchain/langchain/callbacks/tracers/log_stream.py index 0878fa575e6bb..22b33c3768d04 100644 --- a/libs/langchain/langchain/callbacks/tracers/log_stream.py +++ b/libs/langchain/langchain/callbacks/tracers/log_stream.py @@ -1,4 +1,4 @@ -from langchain_core.callbacks.tracers.log_stream import ( +from langchain_core.tracers.log_stream import ( LogEntry, LogStreamCallbackHandler, RunLog, diff --git a/libs/langchain/langchain/callbacks/tracers/root_listeners.py b/libs/langchain/langchain/callbacks/tracers/root_listeners.py index f57b31c938d3e..0dee9bce2d233 100644 --- a/libs/langchain/langchain/callbacks/tracers/root_listeners.py +++ b/libs/langchain/langchain/callbacks/tracers/root_listeners.py @@ -1,3 +1,3 @@ -from langchain_core.callbacks.tracers.root_listeners import RootListenersTracer +from langchain_core.tracers.root_listeners import RootListenersTracer __all__ = ["RootListenersTracer"] diff --git a/libs/langchain/langchain/callbacks/tracers/run_collector.py b/libs/langchain/langchain/callbacks/tracers/run_collector.py index 1e872946631ee..1240026bfb61e 100644 --- a/libs/langchain/langchain/callbacks/tracers/run_collector.py +++ b/libs/langchain/langchain/callbacks/tracers/run_collector.py @@ -1,3 +1,3 @@ -from langchain_core.callbacks.tracers.run_collector import RunCollectorCallbackHandler +from langchain_core.tracers.run_collector import RunCollectorCallbackHandler __all__ = ["RunCollectorCallbackHandler"] diff --git a/libs/langchain/langchain/callbacks/tracers/schemas.py b/libs/langchain/langchain/callbacks/tracers/schemas.py index 824e757689505..e8f34027d3411 100644 --- a/libs/langchain/langchain/callbacks/tracers/schemas.py +++ b/libs/langchain/langchain/callbacks/tracers/schemas.py @@ -1,4 +1,4 @@ -from langchain_core.callbacks.tracers.schemas import ( +from langchain_core.tracers.schemas import ( BaseRun, ChainRun, LLMRun, diff --git a/libs/langchain/langchain/callbacks/tracers/stdout.py b/libs/langchain/langchain/callbacks/tracers/stdout.py index 6294ada57c351..716e2c30ba29e 100644 --- a/libs/langchain/langchain/callbacks/tracers/stdout.py +++ b/libs/langchain/langchain/callbacks/tracers/stdout.py @@ -1,4 +1,4 @@ -from langchain_core.callbacks.tracers.stdout import ( +from langchain_core.tracers.stdout import ( ConsoleCallbackHandler, FunctionCallbackHandler, ) diff --git a/libs/langchain/langchain/callbacks/tracers/wandb.py b/libs/langchain/langchain/callbacks/tracers/wandb.py index dcc35636960fd..31df3352734a7 100644 --- a/libs/langchain/langchain/callbacks/tracers/wandb.py +++ b/libs/langchain/langchain/callbacks/tracers/wandb.py @@ -14,8 +14,8 @@ Union, ) -from langchain.callbacks.tracers.base import BaseTracer -from langchain.callbacks.tracers.schemas import Run +from langchain_core.tracers.base import BaseTracer +from langchain_core.tracers.schemas import Run if TYPE_CHECKING: from wandb import Settings as WBSettings diff --git a/libs/langchain/langchain/callbacks/trubrics_callback.py b/libs/langchain/langchain/callbacks/trubrics_callback.py index 9502e690fd295..4519ad5984b83 100644 --- a/libs/langchain/langchain/callbacks/trubrics_callback.py +++ b/libs/langchain/langchain/callbacks/trubrics_callback.py @@ -2,8 +2,7 @@ from typing import Any, Dict, List, Optional from uuid import UUID -from langchain_core.schema import LLMResult -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, BaseMessage, ChatMessage, @@ -11,6 +10,7 @@ HumanMessage, SystemMessage, ) +from langchain_core.outputs import LLMResult from 
langchain.callbacks.base import BaseCallbackHandler diff --git a/libs/langchain/langchain/callbacks/wandb_callback.py b/libs/langchain/langchain/callbacks/wandb_callback.py index 09baec4ed0f6b..df87d2074f4c7 100644 --- a/libs/langchain/langchain/callbacks/wandb_callback.py +++ b/libs/langchain/langchain/callbacks/wandb_callback.py @@ -4,7 +4,8 @@ from pathlib import Path from typing import Any, Dict, List, Optional, Sequence, Union -from langchain_core.schema import AgentAction, AgentFinish, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.outputs import LLMResult from langchain.callbacks.base import BaseCallbackHandler from langchain.callbacks.utils import ( diff --git a/libs/langchain/langchain/chains/api/base.py b/libs/langchain/langchain/chains/api/base.py index 82d129e0fb24d..f677cb800daa5 100644 --- a/libs/langchain/langchain/chains/api/base.py +++ b/libs/langchain/langchain/chains/api/base.py @@ -4,9 +4,9 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple from urllib.parse import urlparse +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import ( AsyncCallbackManagerForChainRun, diff --git a/libs/langchain/langchain/chains/api/openapi/chain.py b/libs/langchain/langchain/chains/api/openapi/chain.py index 9cf983f978e3e..3d1cec44b7378 100644 --- a/libs/langchain/langchain/chains/api/openapi/chain.py +++ b/libs/langchain/langchain/chains/api/openapi/chain.py @@ -4,8 +4,8 @@ import json from typing import Any, Dict, List, NamedTuple, Optional, cast +from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema.language_model import BaseLanguageModel from requests import Response from langchain.callbacks.manager import CallbackManagerForChainRun, Callbacks diff --git a/libs/langchain/langchain/chains/api/openapi/requests_chain.py b/libs/langchain/langchain/chains/api/openapi/requests_chain.py index 4e85345e4ea97..50179cc90294c 100644 --- a/libs/langchain/langchain/chains/api/openapi/requests_chain.py +++ b/libs/langchain/langchain/chains/api/openapi/requests_chain.py @@ -4,9 +4,9 @@ import re from typing import Any +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.schema import BaseOutputParser -from langchain_core.schema.language_model import BaseLanguageModel from langchain.chains.api.openapi.prompts import REQUEST_TEMPLATE from langchain.chains.llm import LLMChain diff --git a/libs/langchain/langchain/chains/api/openapi/response_chain.py b/libs/langchain/langchain/chains/api/openapi/response_chain.py index 8699d4606db6b..1c6156091f29e 100644 --- a/libs/langchain/langchain/chains/api/openapi/response_chain.py +++ b/libs/langchain/langchain/chains/api/openapi/response_chain.py @@ -4,9 +4,9 @@ import re from typing import Any +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.schema import BaseOutputParser -from langchain_core.schema.language_model import BaseLanguageModel from 
langchain.chains.api.openapi.prompts import RESPONSE_TEMPLATE from langchain.chains.llm import LLMChain diff --git a/libs/langchain/langchain/chains/base.py b/libs/langchain/langchain/chains/base.py index 52e5c20c8ff6b..f02a76eaa5ab0 100644 --- a/libs/langchain/langchain/chains/base.py +++ b/libs/langchain/langchain/chains/base.py @@ -10,6 +10,8 @@ import yaml from langchain_core.load.dump import dumpd +from langchain_core.memory import BaseMemory +from langchain_core.outputs import RunInfo from langchain_core.pydantic_v1 import ( BaseModel, Field, @@ -18,7 +20,6 @@ validator, ) from langchain_core.runnables import RunnableConfig, RunnableSerializable -from langchain_core.schema import RUN_KEY, BaseMemory, RunInfo from langchain.callbacks.base import BaseCallbackManager from langchain.callbacks.manager import ( @@ -28,6 +29,7 @@ CallbackManagerForChainRun, Callbacks, ) +from langchain.schema import RUN_KEY logger = logging.getLogger(__name__) diff --git a/libs/langchain/langchain/chains/combine_documents/refine.py b/libs/langchain/langchain/chains/combine_documents/refine.py index 1c3afbde5c469..de9cded41dac8 100644 --- a/libs/langchain/langchain/chains/combine_documents/refine.py +++ b/libs/langchain/langchain/chains/combine_documents/refine.py @@ -4,9 +4,9 @@ from typing import Any, Dict, List, Tuple +from langchain_core.prompts import BasePromptTemplate, format_document from langchain_core.prompts.prompt import PromptTemplate from langchain_core.pydantic_v1 import Extra, Field, root_validator -from langchain_core.schema import BasePromptTemplate, format_document from langchain.callbacks.manager import Callbacks from langchain.chains.combine_documents.base import ( diff --git a/libs/langchain/langchain/chains/combine_documents/stuff.py b/libs/langchain/langchain/chains/combine_documents/stuff.py index 8c028a9df19c1..fb52465306ba7 100644 --- a/libs/langchain/langchain/chains/combine_documents/stuff.py +++ b/libs/langchain/langchain/chains/combine_documents/stuff.py @@ -2,9 +2,9 @@ from typing import Any, Dict, List, Optional, Tuple +from langchain_core.prompts import BasePromptTemplate, format_document from langchain_core.prompts.prompt import PromptTemplate from langchain_core.pydantic_v1 import Extra, Field, root_validator -from langchain_core.schema import BasePromptTemplate, format_document from langchain.callbacks.manager import Callbacks from langchain.chains.combine_documents.base import ( diff --git a/libs/langchain/langchain/chains/constitutional_ai/base.py b/libs/langchain/langchain/chains/constitutional_ai/base.py index 8c2b26ecc60ac..ba8a551737a08 100644 --- a/libs/langchain/langchain/chains/constitutional_ai/base.py +++ b/libs/langchain/langchain/chains/constitutional_ai/base.py @@ -1,8 +1,8 @@ """Chain for applying constitutional principles to the outputs of another chain.""" from typing import Any, Dict, List, Optional -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/conversation/base.py b/libs/langchain/langchain/chains/conversation/base.py index f9575d9133ab4..d05e7920b5ef6 100644 --- a/libs/langchain/langchain/chains/conversation/base.py +++ b/libs/langchain/langchain/chains/conversation/base.py @@ -1,8 +1,9 @@ """Chain that carries on 
a conversation and calls an LLM.""" from typing import Dict, List +from langchain_core.memory import BaseMemory +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Extra, Field, root_validator -from langchain_core.schema import BaseMemory, BasePromptTemplate from langchain.chains.conversation.prompt import PROMPT from langchain.chains.llm import LLMChain diff --git a/libs/langchain/langchain/chains/conversational_retrieval/base.py b/libs/langchain/langchain/chains/conversational_retrieval/base.py index 3624c874733a2..3b3fd149fbde2 100644 --- a/libs/langchain/langchain/chains/conversational_retrieval/base.py +++ b/libs/langchain/langchain/chains/conversational_retrieval/base.py @@ -7,12 +7,14 @@ from pathlib import Path from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union +from langchain_core.documents import Document +from langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import BaseMessage +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator +from langchain_core.retrievers import BaseRetriever from langchain_core.runnables.config import RunnableConfig -from langchain_core.schema import BasePromptTemplate, BaseRetriever, Document -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import BaseMessage -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore from langchain.callbacks.manager import ( AsyncCallbackManagerForChainRun, diff --git a/libs/langchain/langchain/chains/elasticsearch_database/base.py b/libs/langchain/langchain/chains/elasticsearch_database/base.py index 49ee74cb2d981..3eba20afbc22b 100644 --- a/libs/langchain/langchain/chains/elasticsearch_database/base.py +++ b/libs/langchain/langchain/chains/elasticsearch_database/base.py @@ -3,9 +3,10 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseLLMOutputParser +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Extra, root_validator -from langchain_core.schema import BaseLLMOutputParser, BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/example_generator.py b/libs/langchain/langchain/chains/example_generator.py index da84ad9b49331..8aae0565bfa8a 100644 --- a/libs/langchain/langchain/chains/example_generator.py +++ b/libs/langchain/langchain/chains/example_generator.py @@ -1,8 +1,8 @@ from typing import List +from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts.few_shot import FewShotPromptTemplate from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.chains.llm import LLMChain diff --git a/libs/langchain/langchain/chains/flare/base.py b/libs/langchain/langchain/chains/flare/base.py index e11b3e0648abb..638212d54efac 100644 --- a/libs/langchain/langchain/chains/flare/base.py +++ b/libs/langchain/langchain/chains/flare/base.py @@ -5,9 +5,11 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple import numpy as np +from langchain_core.language_models import 
BaseLanguageModel +from langchain_core.outputs import Generation +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BasePromptTemplate, BaseRetriever, Generation -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( CallbackManagerForChainRun, diff --git a/libs/langchain/langchain/chains/flare/prompts.py b/libs/langchain/langchain/chains/flare/prompts.py index b5e63d2a97be1..22eda20d9c375 100644 --- a/libs/langchain/langchain/chains/flare/prompts.py +++ b/libs/langchain/langchain/chains/flare/prompts.py @@ -1,7 +1,7 @@ from typing import Tuple +from langchain_core.output_parsers import BaseOutputParser from langchain_core.prompts import PromptTemplate -from langchain_core.schema import BaseOutputParser class FinishedOutputParser(BaseOutputParser[Tuple[str, bool]]): diff --git a/libs/langchain/langchain/chains/graph_qa/arangodb.py b/libs/langchain/langchain/chains/graph_qa/arangodb.py index 36aa44c186923..ae992d550f2e5 100644 --- a/libs/langchain/langchain/chains/graph_qa/arangodb.py +++ b/libs/langchain/langchain/chains/graph_qa/arangodb.py @@ -4,8 +4,8 @@ import re from typing import Any, Dict, List, Optional +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BasePromptTemplate from langchain.base_language import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun diff --git a/libs/langchain/langchain/chains/graph_qa/base.py b/libs/langchain/langchain/chains/graph_qa/base.py index f38902d1ab378..76de72128c35e 100644 --- a/libs/langchain/langchain/chains/graph_qa/base.py +++ b/libs/langchain/langchain/chains/graph_qa/base.py @@ -3,9 +3,9 @@ from typing import Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/graph_qa/cypher.py b/libs/langchain/langchain/chains/graph_qa/cypher.py index 43a24955a18ad..91ce4e0454b0a 100644 --- a/libs/langchain/langchain/chains/graph_qa/cypher.py +++ b/libs/langchain/langchain/chains/graph_qa/cypher.py @@ -4,9 +4,9 @@ import re from typing import Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/graph_qa/falkordb.py b/libs/langchain/langchain/chains/graph_qa/falkordb.py index 7bf0311848d00..125160fda01c6 100644 --- a/libs/langchain/langchain/chains/graph_qa/falkordb.py +++ b/libs/langchain/langchain/chains/graph_qa/falkordb.py @@ -4,8 +4,8 @@ import re from typing import Any, Dict, List, Optional +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BasePromptTemplate 
from langchain.base_language import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun diff --git a/libs/langchain/langchain/chains/graph_qa/hugegraph.py b/libs/langchain/langchain/chains/graph_qa/hugegraph.py index add6a0c3ad999..3e10adb7dce71 100644 --- a/libs/langchain/langchain/chains/graph_qa/hugegraph.py +++ b/libs/langchain/langchain/chains/graph_qa/hugegraph.py @@ -3,9 +3,9 @@ from typing import Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/graph_qa/kuzu.py b/libs/langchain/langchain/chains/graph_qa/kuzu.py index a04eb9ef67a45..f45fb786e1a19 100644 --- a/libs/langchain/langchain/chains/graph_qa/kuzu.py +++ b/libs/langchain/langchain/chains/graph_qa/kuzu.py @@ -3,9 +3,9 @@ from typing import Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/graph_qa/nebulagraph.py b/libs/langchain/langchain/chains/graph_qa/nebulagraph.py index 7bb966ca94307..d9c0c09996c48 100644 --- a/libs/langchain/langchain/chains/graph_qa/nebulagraph.py +++ b/libs/langchain/langchain/chains/graph_qa/nebulagraph.py @@ -3,9 +3,9 @@ from typing import Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/graph_qa/sparql.py b/libs/langchain/langchain/chains/graph_qa/sparql.py index 7f6336537ec26..9465aebc22009 100644 --- a/libs/langchain/langchain/chains/graph_qa/sparql.py +++ b/libs/langchain/langchain/chains/graph_qa/sparql.py @@ -5,9 +5,9 @@ from typing import Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts.base import BasePromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/hyde/base.py b/libs/langchain/langchain/chains/hyde/base.py index 07b4e4de7da6a..0d633246cd34d 100644 --- a/libs/langchain/langchain/chains/hyde/base.py +++ b/libs/langchain/langchain/chains/hyde/base.py @@ -7,9 +7,9 @@ from typing import Any, Dict, List, Optional import numpy as np +from langchain_core.embeddings import Embeddings +from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import Extra -from langchain_core.schema.embeddings import Embeddings -from 
langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/llm.py b/libs/langchain/langchain/chains/llm.py index aec49e297032c..784de73a345a5 100644 --- a/libs/langchain/langchain/chains/llm.py +++ b/libs/langchain/langchain/chains/llm.py @@ -4,7 +4,15 @@ import warnings from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast +from langchain_core.language_models import ( + BaseLanguageModel, + LanguageModelInput, +) from langchain_core.load.dump import dumpd +from langchain_core.messages import BaseMessage +from langchain_core.output_parsers import BaseLLMOutputParser, StrOutputParser +from langchain_core.outputs import ChatGeneration, Generation, LLMResult +from langchain_core.prompts import BasePromptTemplate, PromptValue from langchain_core.prompts.prompt import PromptTemplate from langchain_core.pydantic_v1 import Extra, Field from langchain_core.runnables import ( @@ -14,20 +22,6 @@ RunnableWithFallbacks, ) from langchain_core.runnables.configurable import DynamicRunnable -from langchain_core.schema import ( - BaseLLMOutputParser, - BaseMessage, - BasePromptTemplate, - ChatGeneration, - Generation, - LLMResult, - PromptValue, - StrOutputParser, -) -from langchain_core.schema.language_model import ( - BaseLanguageModel, - LanguageModelInput, -) from langchain_core.utils.input import get_colored_text from langchain.callbacks.manager import ( diff --git a/libs/langchain/langchain/chains/llm_checker/base.py b/libs/langchain/langchain/chains/llm_checker/base.py index 10d59348e513e..c432f4f4219bd 100644 --- a/libs/langchain/langchain/chains/llm_checker/base.py +++ b/libs/langchain/langchain/chains/llm_checker/base.py @@ -4,9 +4,9 @@ import warnings from typing import Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts import PromptTemplate from langchain_core.pydantic_v1 import Extra, root_validator -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/llm_math/base.py b/libs/langchain/langchain/chains/llm_math/base.py index 15a6683cfd764..311df1b8c73d3 100644 --- a/libs/langchain/langchain/chains/llm_math/base.py +++ b/libs/langchain/langchain/chains/llm_math/base.py @@ -6,9 +6,9 @@ import warnings from typing import Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Extra, root_validator -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import ( AsyncCallbackManagerForChainRun, diff --git a/libs/langchain/langchain/chains/llm_summarization_checker/base.py b/libs/langchain/langchain/chains/llm_summarization_checker/base.py index d075faff535a3..2e58f9ef9e7a6 100644 --- a/libs/langchain/langchain/chains/llm_summarization_checker/base.py +++ b/libs/langchain/langchain/chains/llm_summarization_checker/base.py @@ -6,9 +6,9 @@ from pathlib import Path from typing import Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts.prompt import PromptTemplate from langchain_core.pydantic_v1 import Extra, 
root_validator -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/mapreduce.py b/libs/langchain/langchain/chains/mapreduce.py index a3fe73319a9c9..dd5bf90e259ca 100644 --- a/libs/langchain/langchain/chains/mapreduce.py +++ b/libs/langchain/langchain/chains/mapreduce.py @@ -7,9 +7,9 @@ from typing import Any, Dict, List, Mapping, Optional +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Extra -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun, Callbacks from langchain.chains import ReduceDocumentsChain diff --git a/libs/langchain/langchain/chains/natbot/base.py b/libs/langchain/langchain/chains/natbot/base.py index d02bd43c5ad9c..013b281e33d7b 100644 --- a/libs/langchain/langchain/chains/natbot/base.py +++ b/libs/langchain/langchain/chains/natbot/base.py @@ -4,8 +4,8 @@ import warnings from typing import Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import Extra, root_validator -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/openai_functions/base.py b/libs/langchain/langchain/chains/openai_functions/base.py index 5f2acff309094..cac02f4430d7c 100644 --- a/libs/langchain/langchain/chains/openai_functions/base.py +++ b/libs/langchain/langchain/chains/openai_functions/base.py @@ -13,14 +13,14 @@ cast, ) -from langchain_core.prompts import BasePromptTemplate -from langchain_core.pydantic_v1 import BaseModel -from langchain_core.runnables import Runnable -from langchain_core.schema import BaseLLMOutputParser -from langchain_core.schema.output_parser import ( +from langchain_core.output_parsers import ( BaseGenerationOutputParser, + BaseLLMOutputParser, BaseOutputParser, ) +from langchain_core.prompts import BasePromptTemplate +from langchain_core.pydantic_v1 import BaseModel +from langchain_core.runnables import Runnable from langchain.base_language import BaseLanguageModel from langchain.chains import LLMChain diff --git a/libs/langchain/langchain/chains/openai_functions/citation_fuzzy_match.py b/libs/langchain/langchain/chains/openai_functions/citation_fuzzy_match.py index 4aac9a9858866..50149106eb6e4 100644 --- a/libs/langchain/langchain/chains/openai_functions/citation_fuzzy_match.py +++ b/libs/langchain/langchain/chains/openai_functions/citation_fuzzy_match.py @@ -1,9 +1,9 @@ from typing import Iterator, List +from langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import HumanMessage, SystemMessage from langchain_core.prompts.chat import ChatPromptTemplate, HumanMessagePromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import HumanMessage, SystemMessage from langchain.chains.llm import LLMChain from langchain.chains.openai_functions.utils import get_llm_kwargs diff --git a/libs/langchain/langchain/chains/openai_functions/extraction.py 
b/libs/langchain/langchain/chains/openai_functions/extraction.py index cf6b1eea2905b..e36a8416136ff 100644 --- a/libs/langchain/langchain/chains/openai_functions/extraction.py +++ b/libs/langchain/langchain/chains/openai_functions/extraction.py @@ -1,9 +1,8 @@ from typing import Any, List, Optional -from langchain_core.prompts import ChatPromptTemplate +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate, ChatPromptTemplate from langchain_core.pydantic_v1 import BaseModel -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.chains.base import Chain from langchain.chains.llm import LLMChain diff --git a/libs/langchain/langchain/chains/openai_functions/openapi.py b/libs/langchain/langchain/chains/openai_functions/openapi.py index 1c23c39ca753c..3e011a7b7676d 100644 --- a/libs/langchain/langchain/chains/openai_functions/openapi.py +++ b/libs/langchain/langchain/chains/openai_functions/openapi.py @@ -6,9 +6,8 @@ from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union import requests -from langchain_core.prompts import ChatPromptTemplate -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate, ChatPromptTemplate from langchain_core.utils.input import get_colored_text from requests import Response diff --git a/libs/langchain/langchain/chains/openai_functions/qa_with_structure.py b/libs/langchain/langchain/chains/openai_functions/qa_with_structure.py index cf5a0e6846661..9363429bfba28 100644 --- a/libs/langchain/langchain/chains/openai_functions/qa_with_structure.py +++ b/libs/langchain/langchain/chains/openai_functions/qa_with_structure.py @@ -1,11 +1,11 @@ from typing import Any, List, Optional, Type, Union +from langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import HumanMessage, SystemMessage +from langchain_core.output_parsers import BaseLLMOutputParser from langchain_core.prompts import PromptTemplate from langchain_core.prompts.chat import ChatPromptTemplate, HumanMessagePromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema import BaseLLMOutputParser -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import HumanMessage, SystemMessage from langchain.chains.llm import LLMChain from langchain.chains.openai_functions.utils import get_llm_kwargs diff --git a/libs/langchain/langchain/chains/openai_functions/tagging.py b/libs/langchain/langchain/chains/openai_functions/tagging.py index f5a5a423b27fc..fec8d1ac81f1c 100644 --- a/libs/langchain/langchain/chains/openai_functions/tagging.py +++ b/libs/langchain/langchain/chains/openai_functions/tagging.py @@ -1,7 +1,7 @@ from typing import Any, Optional +from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts import ChatPromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.chains.base import Chain from langchain.chains.llm import LLMChain diff --git a/libs/langchain/langchain/chains/openai_tools/extraction.py b/libs/langchain/langchain/chains/openai_tools/extraction.py index 95bb3e1bf68a4..a644d5768c5cf 100644 --- a/libs/langchain/langchain/chains/openai_tools/extraction.py +++ 
b/libs/langchain/langchain/chains/openai_tools/extraction.py @@ -1,9 +1,9 @@ from typing import List, Type, Union +from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts import ChatPromptTemplate from langchain_core.pydantic_v1 import BaseModel from langchain_core.runnables import Runnable -from langchain_core.schema.language_model import BaseLanguageModel from langchain.output_parsers import PydanticToolsParser from langchain.utils.openai_functions import convert_pydantic_to_openai_function diff --git a/libs/langchain/langchain/chains/prompt_selector.py b/libs/langchain/langchain/chains/prompt_selector.py index 19aea96886723..d8bb045f02540 100644 --- a/libs/langchain/langchain/chains/prompt_selector.py +++ b/libs/langchain/langchain/chains/prompt_selector.py @@ -1,9 +1,9 @@ from abc import ABC, abstractmethod from typing import Callable, List, Tuple +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.chat_models.base import BaseChatModel from langchain.llms.base import BaseLLM diff --git a/libs/langchain/langchain/chains/qa_generation/base.py b/libs/langchain/langchain/chains/qa_generation/base.py index 2890e85607f0d..cbb0eb36bac1c 100644 --- a/libs/langchain/langchain/chains/qa_generation/base.py +++ b/libs/langchain/langchain/chains/qa_generation/base.py @@ -3,9 +3,9 @@ import json from typing import Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/qa_with_sources/base.py b/libs/langchain/langchain/chains/qa_with_sources/base.py index 8997d487f4b23..966f9e1362d2b 100644 --- a/libs/langchain/langchain/chains/qa_with_sources/base.py +++ b/libs/langchain/langchain/chains/qa_with_sources/base.py @@ -7,9 +7,9 @@ from abc import ABC, abstractmethod from typing import Any, Dict, List, Optional, Tuple +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Extra, root_validator -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import ( AsyncCallbackManagerForChainRun, diff --git a/libs/langchain/langchain/chains/qa_with_sources/loading.py b/libs/langchain/langchain/chains/qa_with_sources/loading.py index 5c9e12d0feb31..e1e63c0316b23 100644 --- a/libs/langchain/langchain/chains/qa_with_sources/loading.py +++ b/libs/langchain/langchain/chains/qa_with_sources/loading.py @@ -3,8 +3,8 @@ from typing import Any, Mapping, Optional, Protocol -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.prompt_template import BasePromptTemplate +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain.chains.combine_documents.base import BaseCombineDocumentsChain from 
langchain.chains.combine_documents.map_reduce import MapReduceDocumentsChain diff --git a/libs/langchain/langchain/chains/qa_with_sources/retrieval.py b/libs/langchain/langchain/chains/qa_with_sources/retrieval.py index 7f6f56cccc926..8fb0e6206f364 100644 --- a/libs/langchain/langchain/chains/qa_with_sources/retrieval.py +++ b/libs/langchain/langchain/chains/qa_with_sources/retrieval.py @@ -3,7 +3,7 @@ from typing import Any, Dict, List from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BaseRetriever +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( AsyncCallbackManagerForChainRun, diff --git a/libs/langchain/langchain/chains/qa_with_sources/vector_db.py b/libs/langchain/langchain/chains/qa_with_sources/vector_db.py index 1817fb7abc128..2d5c98d189911 100644 --- a/libs/langchain/langchain/chains/qa_with_sources/vector_db.py +++ b/libs/langchain/langchain/chains/qa_with_sources/vector_db.py @@ -4,7 +4,7 @@ from typing import Any, Dict, List from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore from langchain.callbacks.manager import ( AsyncCallbackManagerForChainRun, diff --git a/libs/langchain/langchain/chains/query_constructor/base.py b/libs/langchain/langchain/chains/query_constructor/base.py index 4f3b69bed5b50..d6c38d5319d0e 100644 --- a/libs/langchain/langchain/chains/query_constructor/base.py +++ b/libs/langchain/langchain/chains/query_constructor/base.py @@ -4,14 +4,12 @@ import json from typing import Any, Callable, List, Optional, Sequence, Tuple, Union, cast +from langchain_core.exceptions import OutputParserException +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser +from langchain_core.prompts import BasePromptTemplate from langchain_core.prompts.few_shot import FewShotPromptTemplate from langchain_core.runnables import Runnable -from langchain_core.schema import ( - BaseOutputParser, - BasePromptTemplate, - OutputParserException, -) -from langchain_core.schema.language_model import BaseLanguageModel from langchain.chains.llm import LLMChain from langchain.chains.query_constructor.ir import ( diff --git a/libs/langchain/langchain/chains/question_answering/__init__.py b/libs/langchain/langchain/chains/question_answering/__init__.py index 1625e4e748241..3fd5070f1a7bd 100644 --- a/libs/langchain/langchain/chains/question_answering/__init__.py +++ b/libs/langchain/langchain/chains/question_answering/__init__.py @@ -1,8 +1,8 @@ """Load question answering chains.""" from typing import Any, Mapping, Optional, Protocol -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.prompt_template import BasePromptTemplate +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain.callbacks.base import BaseCallbackManager from langchain.callbacks.manager import Callbacks diff --git a/libs/langchain/langchain/chains/retrieval_qa/base.py b/libs/langchain/langchain/chains/retrieval_qa/base.py index 2fccea39faadf..a7558b87375e6 100644 --- a/libs/langchain/langchain/chains/retrieval_qa/base.py +++ b/libs/langchain/langchain/chains/retrieval_qa/base.py @@ -6,11 +6,12 @@ from abc import abstractmethod from typing import Any, Dict, List, Optional +from langchain_core.documents import Document +from langchain_core.language_models 
import BaseLanguageModel from langchain_core.prompts import PromptTemplate from langchain_core.pydantic_v1 import Extra, Field, root_validator -from langchain_core.schema import BaseRetriever, Document -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.retrievers import BaseRetriever +from langchain_core.vectorstores import VectorStore from langchain.callbacks.manager import ( AsyncCallbackManagerForChainRun, @@ -199,7 +200,7 @@ class RetrievalQA(BaseRetrievalQA): from langchain.llms import OpenAI from langchain.chains import RetrievalQA from langchain.vectorstores import FAISS - from langchain_core.schema.vectorstore import VectorStoreRetriever + from langchain_core.vectorstores import VectorStoreRetriever retriever = VectorStoreRetriever(vectorstore=FAISS(...)) retrievalQA = RetrievalQA.from_llm(llm=OpenAI(), retriever=retriever) diff --git a/libs/langchain/langchain/chains/router/embedding_router.py b/libs/langchain/langchain/chains/router/embedding_router.py index 69432177f6952..7b43584e941cb 100644 --- a/libs/langchain/langchain/chains/router/embedding_router.py +++ b/libs/langchain/langchain/chains/router/embedding_router.py @@ -2,9 +2,9 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple, Type +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import Extra -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.router.base import RouterChain diff --git a/libs/langchain/langchain/chains/router/llm_router.py b/libs/langchain/langchain/chains/router/llm_router.py index a6c8ddc05e3cc..03662160c1bd2 100644 --- a/libs/langchain/langchain/chains/router/llm_router.py +++ b/libs/langchain/langchain/chains/router/llm_router.py @@ -3,13 +3,11 @@ from typing import Any, Dict, List, Optional, Type, cast +from langchain_core.exceptions import OutputParserException +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import ( - BaseOutputParser, - BasePromptTemplate, - OutputParserException, -) -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import ( AsyncCallbackManagerForChainRun, diff --git a/libs/langchain/langchain/chains/router/multi_prompt.py b/libs/langchain/langchain/chains/router/multi_prompt.py index c28a5d279dc6e..c5b3c0453411b 100644 --- a/libs/langchain/langchain/chains/router/multi_prompt.py +++ b/libs/langchain/langchain/chains/router/multi_prompt.py @@ -3,8 +3,8 @@ from typing import Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts import PromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.chains import ConversationChain from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/router/multi_retrieval_qa.py b/libs/langchain/langchain/chains/router/multi_retrieval_qa.py index 01b52e7d521f9..17e0a7fe00114 100644 --- a/libs/langchain/langchain/chains/router/multi_retrieval_qa.py +++ b/libs/langchain/langchain/chains/router/multi_retrieval_qa.py @@ -3,9 +3,9 
@@ from typing import Any, Dict, List, Mapping, Optional +from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts import PromptTemplate -from langchain_core.schema import BaseRetriever -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.retrievers import BaseRetriever from langchain.chains import ConversationChain from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/chains/sql_database/query.py b/libs/langchain/langchain/chains/sql_database/query.py index b3d19f2857575..a8dd685a43609 100644 --- a/libs/langchain/langchain/chains/sql_database/query.py +++ b/libs/langchain/langchain/chains/sql_database/query.py @@ -1,9 +1,9 @@ from typing import List, Optional, TypedDict, Union +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import StrOutputParser +from langchain_core.prompts import BasePromptTemplate from langchain_core.runnables import Runnable, RunnableParallel -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.output_parser import NoOpOutputParser -from langchain_core.schema.prompt_template import BasePromptTemplate from langchain.chains.sql_database.prompt import PROMPT, SQL_PROMPTS from langchain.utilities.sql_database import SQLDatabase @@ -79,6 +79,6 @@ def create_sql_query_chain( RunnableParallel(inputs) | prompt_to_use | llm.bind(stop=["\nSQLResult:"]) - | NoOpOutputParser() + | StrOutputParser() | _strip ) diff --git a/libs/langchain/langchain/chains/summarize/__init__.py b/libs/langchain/langchain/chains/summarize/__init__.py index 4d692d30e0c54..ab17d07952c7f 100644 --- a/libs/langchain/langchain/chains/summarize/__init__.py +++ b/libs/langchain/langchain/chains/summarize/__init__.py @@ -1,8 +1,8 @@ """Load summarizing chains.""" from typing import Any, Mapping, Optional, Protocol -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain.callbacks.manager import Callbacks from langchain.chains.combine_documents.base import BaseCombineDocumentsChain diff --git a/libs/langchain/langchain/chat_loaders/base.py b/libs/langchain/langchain/chat_loaders/base.py index 87d3131976b77..7bbdd8894d492 100644 --- a/libs/langchain/langchain/chat_loaders/base.py +++ b/libs/langchain/langchain/chat_loaders/base.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod from typing import Iterator, List -from langchain_core.schema.chat import ChatSession +from langchain_core.chat_sessions import ChatSession class BaseChatLoader(ABC): diff --git a/libs/langchain/langchain/chat_loaders/facebook_messenger.py b/libs/langchain/langchain/chat_loaders/facebook_messenger.py index 52fbec5ec4725..77859db338c5a 100644 --- a/libs/langchain/langchain/chat_loaders/facebook_messenger.py +++ b/libs/langchain/langchain/chat_loaders/facebook_messenger.py @@ -3,8 +3,8 @@ from pathlib import Path from typing import Iterator, Union -from langchain_core.schema.chat import ChatSession -from langchain_core.schema.messages import HumanMessage +from langchain_core.chat_sessions import ChatSession +from langchain_core.messages import HumanMessage from langchain.chat_loaders.base import BaseChatLoader diff --git a/libs/langchain/langchain/chat_loaders/gmail.py b/libs/langchain/langchain/chat_loaders/gmail.py index b22204110e9ad..1fbf66b67fb3a 100644 
--- a/libs/langchain/langchain/chat_loaders/gmail.py +++ b/libs/langchain/langchain/chat_loaders/gmail.py @@ -2,8 +2,8 @@ import re from typing import Any, Iterator -from langchain_core.schema.chat import ChatSession -from langchain_core.schema.messages import HumanMessage +from langchain_core.chat_sessions import ChatSession +from langchain_core.messages import HumanMessage from langchain.chat_loaders.base import BaseChatLoader diff --git a/libs/langchain/langchain/chat_loaders/imessage.py b/libs/langchain/langchain/chat_loaders/imessage.py index 319845486ae0a..093d9b15eda60 100644 --- a/libs/langchain/langchain/chat_loaders/imessage.py +++ b/libs/langchain/langchain/chat_loaders/imessage.py @@ -3,8 +3,8 @@ from pathlib import Path from typing import TYPE_CHECKING, Iterator, List, Optional, Union -from langchain_core.schema import HumanMessage -from langchain_core.schema.chat import ChatSession +from langchain_core.chat_sessions import ChatSession +from langchain_core.messages import HumanMessage from langchain.chat_loaders.base import BaseChatLoader diff --git a/libs/langchain/langchain/chat_loaders/langsmith.py b/libs/langchain/langchain/chat_loaders/langsmith.py index dfe2df3521c77..a21e0cefcdf6c 100644 --- a/libs/langchain/langchain/chat_loaders/langsmith.py +++ b/libs/langchain/langchain/chat_loaders/langsmith.py @@ -3,8 +3,8 @@ import logging from typing import TYPE_CHECKING, Dict, Iterable, Iterator, List, Optional, Union, cast +from langchain_core.chat_sessions import ChatSession from langchain_core.load import load -from langchain_core.schema.chat import ChatSession from langchain.chat_loaders.base import BaseChatLoader diff --git a/libs/langchain/langchain/chat_loaders/slack.py b/libs/langchain/langchain/chat_loaders/slack.py index 9d5822e590d39..ddf44b56d7bf8 100644 --- a/libs/langchain/langchain/chat_loaders/slack.py +++ b/libs/langchain/langchain/chat_loaders/slack.py @@ -5,8 +5,8 @@ from pathlib import Path from typing import Dict, Iterator, List, Union -from langchain_core.schema import AIMessage, HumanMessage -from langchain_core.schema.chat import ChatSession +from langchain_core.chat_sessions import ChatSession +from langchain_core.messages import AIMessage, HumanMessage from langchain.chat_loaders.base import BaseChatLoader diff --git a/libs/langchain/langchain/chat_loaders/telegram.py b/libs/langchain/langchain/chat_loaders/telegram.py index 6661fa0d80f5b..44c7585d3fa6b 100644 --- a/libs/langchain/langchain/chat_loaders/telegram.py +++ b/libs/langchain/langchain/chat_loaders/telegram.py @@ -6,8 +6,8 @@ from pathlib import Path from typing import Iterator, List, Union -from langchain_core.schema import AIMessage, BaseMessage, HumanMessage -from langchain_core.schema.chat import ChatSession +from langchain_core.chat_sessions import ChatSession +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage from langchain.chat_loaders.base import BaseChatLoader diff --git a/libs/langchain/langchain/chat_loaders/utils.py b/libs/langchain/langchain/chat_loaders/utils.py index bd85dc529c0b5..3fe9384e3d282 100644 --- a/libs/langchain/langchain/chat_loaders/utils.py +++ b/libs/langchain/langchain/chat_loaders/utils.py @@ -2,8 +2,8 @@ from copy import deepcopy from typing import Iterable, Iterator, List -from langchain_core.schema.chat import ChatSession -from langchain_core.schema.messages import AIMessage, BaseMessage +from langchain_core.chat_sessions import ChatSession +from langchain_core.messages import AIMessage, BaseMessage def merge_chat_runs_in_session( diff 
--git a/libs/langchain/langchain/chat_loaders/whatsapp.py b/libs/langchain/langchain/chat_loaders/whatsapp.py index 36638e07c66d1..c96d370298748 100644 --- a/libs/langchain/langchain/chat_loaders/whatsapp.py +++ b/libs/langchain/langchain/chat_loaders/whatsapp.py @@ -4,8 +4,8 @@ import zipfile from typing import Iterator, List, Union -from langchain_core.schema import AIMessage, HumanMessage -from langchain_core.schema.chat import ChatSession +from langchain_core.chat_sessions import ChatSession +from langchain_core.messages import AIMessage, HumanMessage from langchain.chat_loaders.base import BaseChatLoader diff --git a/libs/langchain/langchain/chat_models/anthropic.py b/libs/langchain/langchain/chat_models/anthropic.py index b8cc37474d1d8..4a2a77ef9acb8 100644 --- a/libs/langchain/langchain/chat_models/anthropic.py +++ b/libs/langchain/langchain/chat_models/anthropic.py @@ -1,6 +1,6 @@ from typing import Any, AsyncIterator, Dict, Iterator, List, Optional, cast -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, AIMessageChunk, BaseMessage, @@ -8,8 +8,8 @@ HumanMessage, SystemMessage, ) -from langchain_core.schema.output import ChatGeneration, ChatGenerationChunk, ChatResult -from langchain_core.schema.prompt import PromptValue +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult +from langchain_core.prompts import PromptValue from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/anyscale.py b/libs/langchain/langchain/chat_models/anyscale.py index a94cc48da9774..27efc4a1abfb1 100644 --- a/libs/langchain/langchain/chat_models/anyscale.py +++ b/libs/langchain/langchain/chat_models/anyscale.py @@ -7,8 +7,8 @@ from typing import TYPE_CHECKING, Dict, Optional, Set import requests +from langchain_core.messages import BaseMessage from langchain_core.pydantic_v1 import Field, SecretStr, root_validator -from langchain_core.schema.messages import BaseMessage from langchain_core.utils import convert_to_secret_str from langchain.adapters.openai import convert_message_to_dict diff --git a/libs/langchain/langchain/chat_models/azure_openai.py b/libs/langchain/langchain/chat_models/azure_openai.py index bf045c42e9316..8bad52f401110 100644 --- a/libs/langchain/langchain/chat_models/azure_openai.py +++ b/libs/langchain/langchain/chat_models/azure_openai.py @@ -6,8 +6,8 @@ import warnings from typing import Any, Dict, Union +from langchain_core.outputs import ChatResult from langchain_core.pydantic_v1 import BaseModel, Field, root_validator -from langchain_core.schema import ChatResult from langchain.chat_models.openai import ChatOpenAI from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/chat_models/azureml_endpoint.py b/libs/langchain/langchain/chat_models/azureml_endpoint.py index 089acc6605108..675919f277f60 100644 --- a/libs/langchain/langchain/chat_models/azureml_endpoint.py +++ b/libs/langchain/langchain/chat_models/azureml_endpoint.py @@ -1,14 +1,14 @@ import json from typing import Any, Dict, List, Optional, cast -from langchain_core.pydantic_v1 import SecretStr, validator -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, BaseMessage, ChatMessage, HumanMessage, SystemMessage, ) +from langchain_core.pydantic_v1 import SecretStr, validator from langchain_core.utils import convert_to_secret_str from langchain.callbacks.manager import CallbackManagerForLLMRun diff --git 
a/libs/langchain/langchain/chat_models/baichuan.py b/libs/langchain/langchain/chat_models/baichuan.py index ee5bb850bbcba..566adcee26e74 100644 --- a/libs/langchain/langchain/chat_models/baichuan.py +++ b/libs/langchain/langchain/chat_models/baichuan.py @@ -5,22 +5,18 @@ from typing import Any, Dict, Iterator, List, Mapping, Optional, Type import requests -from langchain_core.pydantic_v1 import Field, SecretStr, root_validator -from langchain_core.schema import ( +from langchain_core.messages import ( AIMessage, - BaseMessage, - ChatGeneration, - ChatMessage, - ChatResult, - HumanMessage, -) -from langchain_core.schema.messages import ( AIMessageChunk, + BaseMessage, BaseMessageChunk, + ChatMessage, ChatMessageChunk, + HumanMessage, HumanMessageChunk, ) -from langchain_core.schema.output import ChatGenerationChunk +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult +from langchain_core.pydantic_v1 import Field, SecretStr, root_validator from langchain_core.utils import ( convert_to_secret_str, get_pydantic_field_names, diff --git a/libs/langchain/langchain/chat_models/baidu_qianfan_endpoint.py b/libs/langchain/langchain/chat_models/baidu_qianfan_endpoint.py index 27f41463ed3d7..9841d4a344a0d 100644 --- a/libs/langchain/langchain/chat_models/baidu_qianfan_endpoint.py +++ b/libs/langchain/langchain/chat_models/baidu_qianfan_endpoint.py @@ -3,9 +3,7 @@ import logging from typing import Any, AsyncIterator, Dict, Iterator, List, Mapping, Optional, cast -from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema import ChatGeneration, ChatResult -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, AIMessageChunk, BaseMessage, @@ -14,7 +12,8 @@ HumanMessage, SystemMessage, ) -from langchain_core.schema.output import ChatGenerationChunk +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult +from langchain_core.pydantic_v1 import Field, root_validator from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/base.py b/libs/langchain/langchain/chat_models/base.py index 686b88ece9c6f..1be59da54ce72 100644 --- a/libs/langchain/langchain/chat_models/base.py +++ b/libs/langchain/langchain/chat_models/base.py @@ -1,4 +1,4 @@ -from langchain_core.chat_model import ( +from langchain_core.language_models.chat_models import ( BaseChatModel, SimpleChatModel, _agenerate_from_stream, diff --git a/libs/langchain/langchain/chat_models/bedrock.py b/libs/langchain/langchain/chat_models/bedrock.py index 36ed3fe6eb079..011a7d372de1b 100644 --- a/libs/langchain/langchain/chat_models/bedrock.py +++ b/libs/langchain/langchain/chat_models/bedrock.py @@ -1,8 +1,8 @@ from typing import Any, Dict, Iterator, List, Optional +from langchain_core.messages import AIMessage, AIMessageChunk, BaseMessage +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult from langchain_core.pydantic_v1 import Extra -from langchain_core.schema.messages import AIMessage, AIMessageChunk, BaseMessage -from langchain_core.schema.output import ChatGeneration, ChatGenerationChunk, ChatResult from langchain.callbacks.manager import ( CallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/cohere.py b/libs/langchain/langchain/chat_models/cohere.py index b5bce8ddab135..dbb03d56a817a 100644 --- a/libs/langchain/langchain/chat_models/cohere.py +++ b/libs/langchain/langchain/chat_models/cohere.py @@ -1,6 +1,6 @@ 
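The chat-model modules in this part of the diff all settle on the same two homes: message classes in langchain_core.messages and generation/result types in langchain_core.outputs. A hedged sketch of how those relocated pieces fit together (the strings are invented and this is not code from any of the files above):

from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.outputs import ChatGeneration, ChatResult

# Illustration only: a chat model's _generate() typically wraps each candidate
# reply message in a ChatGeneration and returns them inside a ChatResult.
prompt = [HumanMessage(content="Say hello")]
result = ChatResult(
    generations=[ChatGeneration(message=AIMessage(content="Hello!"))]
)
assert result.generations[0].message.content == "Hello!"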
from typing import Any, AsyncIterator, Dict, Iterator, List, Optional -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, AIMessageChunk, BaseMessage, @@ -8,7 +8,7 @@ HumanMessage, SystemMessage, ) -from langchain_core.schema.output import ChatGeneration, ChatGenerationChunk, ChatResult +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, @@ -103,7 +103,7 @@ class ChatCohere(BaseChatModel, BaseCohere): .. code-block:: python from langchain.chat_models import ChatCohere - from langchain_core.schema import HumanMessage + from langchain_core.messages import HumanMessage chat = ChatCohere(model="foo") result = chat([HumanMessage(content="Hello")]) diff --git a/libs/langchain/langchain/chat_models/ernie.py b/libs/langchain/langchain/chat_models/ernie.py index 65a037218a559..e23002cd36220 100644 --- a/libs/langchain/langchain/chat_models/ernie.py +++ b/libs/langchain/langchain/chat_models/ernie.py @@ -4,15 +4,14 @@ from typing import Any, Dict, List, Mapping, Optional import requests -from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import ( +from langchain_core.messages import ( AIMessage, BaseMessage, - ChatGeneration, ChatMessage, - ChatResult, HumanMessage, ) +from langchain_core.outputs import ChatGeneration, ChatResult +from langchain_core.pydantic_v1 import root_validator from langchain.callbacks.manager import CallbackManagerForLLMRun from langchain.chat_models.base import BaseChatModel diff --git a/libs/langchain/langchain/chat_models/everlyai.py b/libs/langchain/langchain/chat_models/everlyai.py index 5b846fdce9c46..95bddaba5b06d 100644 --- a/libs/langchain/langchain/chat_models/everlyai.py +++ b/libs/langchain/langchain/chat_models/everlyai.py @@ -5,8 +5,8 @@ import sys from typing import TYPE_CHECKING, Dict, Optional, Set +from langchain_core.messages import BaseMessage from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema.messages import BaseMessage from langchain.adapters.openai import convert_message_to_dict from langchain.chat_models.openai import ( diff --git a/libs/langchain/langchain/chat_models/fake.py b/libs/langchain/langchain/chat_models/fake.py index 7d7c5330bef59..e155bc35bd85f 100644 --- a/libs/langchain/langchain/chat_models/fake.py +++ b/libs/langchain/langchain/chat_models/fake.py @@ -3,9 +3,8 @@ import time from typing import Any, AsyncIterator, Dict, Iterator, List, Optional, Union -from langchain_core.schema import ChatResult -from langchain_core.schema.messages import AIMessageChunk, BaseMessage -from langchain_core.schema.output import ChatGeneration, ChatGenerationChunk +from langchain_core.messages import AIMessageChunk, BaseMessage +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/fireworks.py b/libs/langchain/langchain/chat_models/fireworks.py index 32ff6d20b915f..0512ea37a77e2 100644 --- a/libs/langchain/langchain/chat_models/fireworks.py +++ b/libs/langchain/langchain/chat_models/fireworks.py @@ -10,8 +10,7 @@ Union, ) -from langchain_core.pydantic_v1 import Field, SecretStr, root_validator -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, AIMessageChunk, BaseMessage, @@ -25,7 +24,8 @@ SystemMessage, SystemMessageChunk, ) -from 
langchain_core.schema.output import ChatGeneration, ChatGenerationChunk, ChatResult +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult +from langchain_core.pydantic_v1 import Field, SecretStr, root_validator from langchain_core.utils import convert_to_secret_str from langchain.adapters.openai import convert_message_to_dict diff --git a/libs/langchain/langchain/chat_models/gigachat.py b/libs/langchain/langchain/chat_models/gigachat.py index 601309f99a7aa..9b9eabc740878 100644 --- a/libs/langchain/langchain/chat_models/gigachat.py +++ b/libs/langchain/langchain/chat_models/gigachat.py @@ -1,8 +1,7 @@ import logging from typing import Any, AsyncIterator, Iterator, List, Optional -from langchain_core.schema import ChatResult -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, AIMessageChunk, BaseMessage, @@ -10,7 +9,7 @@ HumanMessage, SystemMessage, ) -from langchain_core.schema.output import ChatGeneration, ChatGenerationChunk +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/google_palm.py b/libs/langchain/langchain/chat_models/google_palm.py index ed85c236f6818..52764b16a7e66 100644 --- a/libs/langchain/langchain/chat_models/google_palm.py +++ b/libs/langchain/langchain/chat_models/google_palm.py @@ -4,18 +4,18 @@ import logging from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, cast -from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema import ( - ChatGeneration, - ChatResult, -) -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, BaseMessage, ChatMessage, HumanMessage, SystemMessage, ) +from langchain_core.outputs import ( + ChatGeneration, + ChatResult, +) +from langchain_core.pydantic_v1 import BaseModel, root_validator from tenacity import ( before_sleep_log, retry, diff --git a/libs/langchain/langchain/chat_models/human.py b/libs/langchain/langchain/chat_models/human.py index 98e594a979c53..238ff524874b4 100644 --- a/libs/langchain/langchain/chat_models/human.py +++ b/libs/langchain/langchain/chat_models/human.py @@ -5,14 +5,14 @@ from typing import Any, Callable, Dict, List, Mapping, Optional import yaml -from langchain_core.pydantic_v1 import Field -from langchain_core.schema.messages import ( +from langchain_core.messages import ( BaseMessage, HumanMessage, _message_from_dict, messages_to_dict, ) -from langchain_core.schema.output import ChatGeneration, ChatResult +from langchain_core.outputs import ChatGeneration, ChatResult +from langchain_core.pydantic_v1 import Field from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/hunyuan.py b/libs/langchain/langchain/chat_models/hunyuan.py index 040b1007c033f..baa2f7abde57e 100644 --- a/libs/langchain/langchain/chat_models/hunyuan.py +++ b/libs/langchain/langchain/chat_models/hunyuan.py @@ -8,22 +8,18 @@ from urllib.parse import urlparse import requests -from langchain_core.pydantic_v1 import Field, SecretStr, root_validator -from langchain_core.schema import ( +from langchain_core.messages import ( AIMessage, - BaseMessage, - ChatGeneration, - ChatMessage, - ChatResult, - HumanMessage, -) -from langchain_core.schema.messages import ( AIMessageChunk, + BaseMessage, BaseMessageChunk, + ChatMessage, ChatMessageChunk, + HumanMessage, 
HumanMessageChunk, ) -from langchain_core.schema.output import ChatGenerationChunk +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult +from langchain_core.pydantic_v1 import Field, SecretStr, root_validator from langchain_core.utils import ( convert_to_secret_str, get_pydantic_field_names, diff --git a/libs/langchain/langchain/chat_models/javelin_ai_gateway.py b/libs/langchain/langchain/chat_models/javelin_ai_gateway.py index 48a36a2f5a229..c17e7d4fd6e9f 100644 --- a/libs/langchain/langchain/chat_models/javelin_ai_gateway.py +++ b/libs/langchain/langchain/chat_models/javelin_ai_gateway.py @@ -1,12 +1,7 @@ import logging from typing import Any, Dict, List, Mapping, Optional, cast -from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr -from langchain_core.schema import ( - ChatGeneration, - ChatResult, -) -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, BaseMessage, ChatMessage, @@ -14,6 +9,11 @@ HumanMessage, SystemMessage, ) +from langchain_core.outputs import ( + ChatGeneration, + ChatResult, +) +from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/jinachat.py b/libs/langchain/langchain/chat_models/jinachat.py index ee4347e5ef310..95aefaaf7bda3 100644 --- a/libs/langchain/langchain/chat_models/jinachat.py +++ b/libs/langchain/langchain/chat_models/jinachat.py @@ -16,25 +16,21 @@ Union, ) -from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema import ( +from langchain_core.messages import ( AIMessage, + AIMessageChunk, BaseMessage, - ChatGeneration, + BaseMessageChunk, ChatMessage, - ChatResult, + ChatMessageChunk, FunctionMessage, HumanMessage, - SystemMessage, -) -from langchain_core.schema.messages import ( - AIMessageChunk, - BaseMessageChunk, - ChatMessageChunk, HumanMessageChunk, + SystemMessage, SystemMessageChunk, ) -from langchain_core.schema.output import ChatGenerationChunk +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult +from langchain_core.pydantic_v1 import Field, root_validator from langchain_core.utils import get_pydantic_field_names from tenacity import ( before_sleep_log, diff --git a/libs/langchain/langchain/chat_models/konko.py b/libs/langchain/langchain/chat_models/konko.py index aad978123dfae..6e7c773f11b8c 100644 --- a/libs/langchain/langchain/chat_models/konko.py +++ b/libs/langchain/langchain/chat_models/konko.py @@ -16,10 +16,9 @@ ) import requests +from langchain_core.messages import AIMessageChunk, BaseMessage +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema import ChatGeneration, ChatResult -from langchain_core.schema.messages import AIMessageChunk, BaseMessage -from langchain_core.schema.output import ChatGenerationChunk from langchain.adapters.openai import convert_dict_to_message, convert_message_to_dict from langchain.callbacks.manager import ( diff --git a/libs/langchain/langchain/chat_models/litellm.py b/libs/langchain/langchain/chat_models/litellm.py index c5b32fae61d61..cf31721b19fd1 100644 --- a/libs/langchain/langchain/chat_models/litellm.py +++ b/libs/langchain/langchain/chat_models/litellm.py @@ -16,12 +16,7 @@ Union, ) -from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema import ( - 
ChatGeneration, - ChatResult, -) -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, AIMessageChunk, BaseMessage, @@ -35,7 +30,12 @@ SystemMessage, SystemMessageChunk, ) -from langchain_core.schema.output import ChatGenerationChunk +from langchain_core.outputs import ( + ChatGeneration, + ChatGenerationChunk, + ChatResult, +) +from langchain_core.pydantic_v1 import Field, root_validator from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/minimax.py b/libs/langchain/langchain/chat_models/minimax.py index 4e23c17bb2c21..15dba6890385d 100644 --- a/libs/langchain/langchain/chat_models/minimax.py +++ b/libs/langchain/langchain/chat_models/minimax.py @@ -2,12 +2,12 @@ import logging from typing import Any, Dict, List, Optional, cast -from langchain_core.schema import ( +from langchain_core.messages import ( AIMessage, BaseMessage, - ChatResult, HumanMessage, ) +from langchain_core.outputs import ChatResult from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/mlflow_ai_gateway.py b/libs/langchain/langchain/chat_models/mlflow_ai_gateway.py index 74ec0def45e9c..131eb8b49be2b 100644 --- a/libs/langchain/langchain/chat_models/mlflow_ai_gateway.py +++ b/libs/langchain/langchain/chat_models/mlflow_ai_gateway.py @@ -3,12 +3,7 @@ from functools import partial from typing import Any, Dict, List, Mapping, Optional -from langchain_core.pydantic_v1 import BaseModel, Extra -from langchain_core.schema import ( - ChatGeneration, - ChatResult, -) -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, BaseMessage, ChatMessage, @@ -16,6 +11,11 @@ HumanMessage, SystemMessage, ) +from langchain_core.outputs import ( + ChatGeneration, + ChatResult, +) +from langchain_core.pydantic_v1 import BaseModel, Extra from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/ollama.py b/libs/langchain/langchain/chat_models/ollama.py index 27efd7db2c0d8..b356733566d16 100644 --- a/libs/langchain/langchain/chat_models/ollama.py +++ b/libs/langchain/langchain/chat_models/ollama.py @@ -1,8 +1,7 @@ import json from typing import Any, Iterator, List, Optional -from langchain_core.schema import ChatResult -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, AIMessageChunk, BaseMessage, @@ -10,7 +9,7 @@ HumanMessage, SystemMessage, ) -from langchain_core.schema.output import ChatGeneration, ChatGenerationChunk +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult from langchain.callbacks.manager import ( CallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/openai.py b/libs/langchain/langchain/chat_models/openai.py index e3de65d7ebeff..a58427f491dea 100644 --- a/libs/langchain/langchain/chat_models/openai.py +++ b/libs/langchain/langchain/chat_models/openai.py @@ -20,11 +20,8 @@ Union, ) -from langchain_core.pydantic_v1 import BaseModel, Field, root_validator -from langchain_core.runnables import Runnable -from langchain_core.schema import ChatGeneration, ChatResult -from langchain_core.schema.language_model import LanguageModelInput -from langchain_core.schema.messages import ( +from langchain_core.language_models import LanguageModelInput +from langchain_core.messages import ( AIMessageChunk, BaseMessage, BaseMessageChunk, @@ -34,7 +31,9 @@ 
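Several of the chat models above also import the streaming chunk types, which move the same way: AIMessageChunk and friends to langchain_core.messages, ChatGenerationChunk to langchain_core.outputs. A small sketch of how the relocated chunk classes combine, assuming the "+" concatenation behaviour provided by langchain_core (not taken from this diff):

from langchain_core.messages import AIMessageChunk
from langchain_core.outputs import ChatGenerationChunk

# Sketch: streaming chunks concatenate with "+", merging message content.
first = ChatGenerationChunk(message=AIMessageChunk(content="Hel"))
second = ChatGenerationChunk(message=AIMessageChunk(content="lo"))
merged = first + second
assert merged.text == "Hello"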
SystemMessageChunk, ToolMessageChunk, ) -from langchain_core.schema.output import ChatGenerationChunk +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult +from langchain_core.pydantic_v1 import BaseModel, Field, root_validator +from langchain_core.runnables import Runnable from langchain_core.utils import ( get_pydantic_field_names, ) diff --git a/libs/langchain/langchain/chat_models/pai_eas_endpoint.py b/libs/langchain/langchain/chat_models/pai_eas_endpoint.py index 23329144b72b2..4ca1a87bf788b 100644 --- a/libs/langchain/langchain/chat_models/pai_eas_endpoint.py +++ b/libs/langchain/langchain/chat_models/pai_eas_endpoint.py @@ -5,9 +5,7 @@ from typing import Any, AsyncIterator, Dict, List, Optional, cast import requests -from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import ChatGeneration, ChatResult -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, AIMessageChunk, BaseMessage, @@ -15,7 +13,8 @@ HumanMessage, SystemMessage, ) -from langchain_core.schema.output import ChatGenerationChunk +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult +from langchain_core.pydantic_v1 import root_validator from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/promptlayer_openai.py b/libs/langchain/langchain/chat_models/promptlayer_openai.py index 9db78029ba7a8..e7146b6ecaa30 100644 --- a/libs/langchain/langchain/chat_models/promptlayer_openai.py +++ b/libs/langchain/langchain/chat_models/promptlayer_openai.py @@ -2,8 +2,8 @@ import datetime from typing import Any, Dict, List, Optional -from langchain_core.schema import ChatResult -from langchain_core.schema.messages import BaseMessage +from langchain_core.messages import BaseMessage +from langchain_core.outputs import ChatResult from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/tongyi.py b/libs/langchain/langchain/chat_models/tongyi.py index c079349840c2f..3befee8004645 100644 --- a/libs/langchain/langchain/chat_models/tongyi.py +++ b/libs/langchain/langchain/chat_models/tongyi.py @@ -13,9 +13,7 @@ Type, ) -from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema import ChatGeneration, ChatResult -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, AIMessageChunk, BaseMessage, @@ -29,7 +27,13 @@ SystemMessage, SystemMessageChunk, ) -from langchain_core.schema.output import ChatGenerationChunk, GenerationChunk +from langchain_core.outputs import ( + ChatGeneration, + ChatGenerationChunk, + ChatResult, + GenerationChunk, +) +from langchain_core.pydantic_v1 import Field, root_validator from requests.exceptions import HTTPError from tenacity import ( RetryCallState, diff --git a/libs/langchain/langchain/chat_models/vertexai.py b/libs/langchain/langchain/chat_models/vertexai.py index 54077319dd41d..d0260b5ab9841 100644 --- a/libs/langchain/langchain/chat_models/vertexai.py +++ b/libs/langchain/langchain/chat_models/vertexai.py @@ -5,16 +5,15 @@ from dataclasses import dataclass, field from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Union, cast -from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import ChatGeneration, ChatResult -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, AIMessageChunk, 
BaseMessage, HumanMessage, SystemMessage, ) -from langchain_core.schema.output import ChatGenerationChunk +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult +from langchain_core.pydantic_v1 import root_validator from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/chat_models/yandex.py b/libs/langchain/langchain/chat_models/yandex.py index c789ffa0819c6..5870ef7cd0762 100644 --- a/libs/langchain/langchain/chat_models/yandex.py +++ b/libs/langchain/langchain/chat_models/yandex.py @@ -2,14 +2,13 @@ import logging from typing import Any, Dict, List, Optional, Tuple, cast -from langchain_core.schema import ( +from langchain_core.messages import ( AIMessage, BaseMessage, - ChatGeneration, - ChatResult, HumanMessage, SystemMessage, ) +from langchain_core.outputs import ChatGeneration, ChatResult from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/docstore/arbitrary_fn.py b/libs/langchain/langchain/docstore/arbitrary_fn.py index 179a2773c8aa8..8f73f6a5cbead 100644 --- a/libs/langchain/langchain/docstore/arbitrary_fn.py +++ b/libs/langchain/langchain/docstore/arbitrary_fn.py @@ -1,6 +1,6 @@ from typing import Callable, Union -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.docstore.base import Docstore diff --git a/libs/langchain/langchain/docstore/document.py b/libs/langchain/langchain/docstore/document.py index a2825e674d8b1..88aebd279509a 100644 --- a/libs/langchain/langchain/docstore/document.py +++ b/libs/langchain/langchain/docstore/document.py @@ -1,3 +1,3 @@ -from langchain_core.schema import Document +from langchain_core.documents import Document __all__ = ["Document"] diff --git a/libs/langchain/langchain/document_loaders/apify_dataset.py b/libs/langchain/langchain/document_loaders/apify_dataset.py index 2e273966310c8..3cde43faf10df 100644 --- a/libs/langchain/langchain/document_loaders/apify_dataset.py +++ b/libs/langchain/langchain/document_loaders/apify_dataset.py @@ -15,7 +15,7 @@ class ApifyDatasetLoader(BaseLoader, BaseModel): .. 
code-block:: python from langchain.document_loaders import ApifyDatasetLoader - from langchain_core.schema import Document + from langchain_core.documents import Document loader = ApifyDatasetLoader( dataset_id="YOUR-DATASET-ID", diff --git a/libs/langchain/langchain/document_loaders/base.py b/libs/langchain/langchain/document_loaders/base.py index ae036e2c67c6b..aea42b391cde9 100644 --- a/libs/langchain/langchain/document_loaders/base.py +++ b/libs/langchain/langchain/document_loaders/base.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from typing import Iterator, List, Optional -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.blob_loaders import Blob from langchain.text_splitter import RecursiveCharacterTextSplitter, TextSplitter diff --git a/libs/langchain/langchain/document_loaders/concurrent.py b/libs/langchain/langchain/document_loaders/concurrent.py index e6f01599c5abb..5870c794e45e8 100644 --- a/libs/langchain/langchain/document_loaders/concurrent.py +++ b/libs/langchain/langchain/document_loaders/concurrent.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Iterator, Literal, Optional, Sequence, Union -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.base import BaseBlobParser from langchain.document_loaders.blob_loaders import BlobLoader, FileSystemBlobLoader diff --git a/libs/langchain/langchain/document_loaders/generic.py b/libs/langchain/langchain/document_loaders/generic.py index f5ee0d2ff6725..88d262faee621 100644 --- a/libs/langchain/langchain/document_loaders/generic.py +++ b/libs/langchain/langchain/document_loaders/generic.py @@ -3,7 +3,7 @@ from pathlib import Path from typing import Iterator, List, Literal, Optional, Sequence, Union -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.base import BaseBlobParser, BaseLoader from langchain.document_loaders.blob_loaders import BlobLoader, FileSystemBlobLoader diff --git a/libs/langchain/langchain/document_loaders/joplin.py b/libs/langchain/langchain/document_loaders/joplin.py index 3f16d25c928b4..c3f171f75596f 100644 --- a/libs/langchain/langchain/document_loaders/joplin.py +++ b/libs/langchain/langchain/document_loaders/joplin.py @@ -3,7 +3,7 @@ from datetime import datetime from typing import Iterator, List, Optional -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.base import BaseLoader from langchain.utils import get_from_env diff --git a/libs/langchain/langchain/document_loaders/lakefs.py b/libs/langchain/langchain/document_loaders/lakefs.py index 05b0ca9c08161..b593e6095ecda 100644 --- a/libs/langchain/langchain/document_loaders/lakefs.py +++ b/libs/langchain/langchain/document_loaders/lakefs.py @@ -5,7 +5,7 @@ from urllib.parse import urljoin import requests -from langchain_core.schema import Document +from langchain_core.documents import Document from requests.auth import HTTPBasicAuth from langchain.document_loaders.base import BaseLoader diff --git a/libs/langchain/langchain/document_loaders/parsers/audio.py b/libs/langchain/langchain/document_loaders/parsers/audio.py index 720ed899cfbd2..073eb82ea0883 100644 --- a/libs/langchain/langchain/document_loaders/parsers/audio.py +++ b/libs/langchain/langchain/document_loaders/parsers/audio.py @@ -2,7 +2,7 @@ import time from typing import Dict, Iterator, 
Optional, Tuple -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.base import BaseBlobParser from langchain.document_loaders.blob_loaders import Blob diff --git a/libs/langchain/langchain/document_loaders/parsers/generic.py b/libs/langchain/langchain/document_loaders/parsers/generic.py index ff433a540c5ba..81ebe7897f1eb 100644 --- a/libs/langchain/langchain/document_loaders/parsers/generic.py +++ b/libs/langchain/langchain/document_loaders/parsers/generic.py @@ -4,7 +4,7 @@ """ from typing import Iterator, Mapping, Optional -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.base import BaseBlobParser from langchain.document_loaders.blob_loaders.schema import Blob diff --git a/libs/langchain/langchain/document_loaders/parsers/msword.py b/libs/langchain/langchain/document_loaders/parsers/msword.py index 8bced491ed13a..bcb62d7e30bb8 100644 --- a/libs/langchain/langchain/document_loaders/parsers/msword.py +++ b/libs/langchain/langchain/document_loaders/parsers/msword.py @@ -1,6 +1,6 @@ from typing import Iterator -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.base import BaseBlobParser from langchain.document_loaders.blob_loaders import Blob diff --git a/libs/langchain/langchain/document_loaders/parsers/pdf.py b/libs/langchain/langchain/document_loaders/parsers/pdf.py index 722b8b2c79e5d..52d47e1f2fff0 100644 --- a/libs/langchain/langchain/document_loaders/parsers/pdf.py +++ b/libs/langchain/langchain/document_loaders/parsers/pdf.py @@ -15,7 +15,7 @@ from urllib.parse import urlparse import numpy as np -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.base import BaseBlobParser from langchain.document_loaders.blob_loaders import Blob diff --git a/libs/langchain/langchain/document_loaders/parsers/txt.py b/libs/langchain/langchain/document_loaders/parsers/txt.py index 81ef207139184..a39071a5f1e98 100644 --- a/libs/langchain/langchain/document_loaders/parsers/txt.py +++ b/libs/langchain/langchain/document_loaders/parsers/txt.py @@ -1,7 +1,7 @@ """Module for parsing text files..""" from typing import Iterator -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.base import BaseBlobParser from langchain.document_loaders.blob_loaders import Blob diff --git a/libs/langchain/langchain/document_loaders/rocksetdb.py b/libs/langchain/langchain/document_loaders/rocksetdb.py index cdfe4313e4460..b0e4127bbfb9a 100644 --- a/libs/langchain/langchain/document_loaders/rocksetdb.py +++ b/libs/langchain/langchain/document_loaders/rocksetdb.py @@ -1,6 +1,6 @@ from typing import Any, Callable, Iterator, List, Optional, Tuple -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.base import BaseLoader diff --git a/libs/langchain/langchain/document_loaders/sitemap.py b/libs/langchain/langchain/document_loaders/sitemap.py index 4510d50090467..b44b28e0b2d2f 100644 --- a/libs/langchain/langchain/document_loaders/sitemap.py +++ b/libs/langchain/langchain/document_loaders/sitemap.py @@ -3,7 +3,7 @@ from typing import Any, Callable, Generator, Iterable, List, Optional, Tuple from urllib.parse import urlparse -from langchain_core.schema import Document +from langchain_core.documents import Document 
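The document-loader changes above and below are all the same one-line move: Document now lives in langchain_core.documents. A minimal sketch of the relocated class (field values are illustrative):

from langchain_core.documents import Document

# Illustration only: Document keeps its page_content / metadata shape,
# only the import path changes.
doc = Document(
    page_content="LangChain splits source text into retrievable chunks.",
    metadata={"source": "example.txt", "page": 1},
)
print(doc.page_content, doc.metadata["source"])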
from langchain.document_loaders.web_base import WebBaseLoader diff --git a/libs/langchain/langchain/document_loaders/tensorflow_datasets.py b/libs/langchain/langchain/document_loaders/tensorflow_datasets.py index d70fc0da806cb..a41670ac54a96 100644 --- a/libs/langchain/langchain/document_loaders/tensorflow_datasets.py +++ b/libs/langchain/langchain/document_loaders/tensorflow_datasets.py @@ -1,6 +1,6 @@ from typing import Callable, Dict, Iterator, List, Optional -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.base import BaseLoader from langchain.utilities.tensorflow_datasets import TensorflowDatasets diff --git a/libs/langchain/langchain/document_transformers/beautiful_soup_transformer.py b/libs/langchain/langchain/document_transformers/beautiful_soup_transformer.py index 30f574c8f3e2b..459ca55c95e4a 100644 --- a/libs/langchain/langchain/document_transformers/beautiful_soup_transformer.py +++ b/libs/langchain/langchain/document_transformers/beautiful_soup_transformer.py @@ -1,6 +1,7 @@ from typing import Any, Iterator, List, Sequence, cast -from langchain_core.schema import BaseDocumentTransformer, Document +from langchain_core.document_transformers import BaseDocumentTransformer +from langchain_core.documents import Document class BeautifulSoupTransformer(BaseDocumentTransformer): diff --git a/libs/langchain/langchain/document_transformers/doctran_text_extract.py b/libs/langchain/langchain/document_transformers/doctran_text_extract.py index 7a951dc5abe06..25c8581173603 100644 --- a/libs/langchain/langchain/document_transformers/doctran_text_extract.py +++ b/libs/langchain/langchain/document_transformers/doctran_text_extract.py @@ -1,6 +1,7 @@ from typing import Any, List, Optional, Sequence -from langchain_core.schema import BaseDocumentTransformer, Document +from langchain_core.document_transformers import BaseDocumentTransformer +from langchain_core.documents import Document from langchain.utils import get_from_env diff --git a/libs/langchain/langchain/document_transformers/doctran_text_qa.py b/libs/langchain/langchain/document_transformers/doctran_text_qa.py index 463d7a93c49bf..bcd6b3ce6b0b9 100644 --- a/libs/langchain/langchain/document_transformers/doctran_text_qa.py +++ b/libs/langchain/langchain/document_transformers/doctran_text_qa.py @@ -1,6 +1,7 @@ from typing import Any, Optional, Sequence -from langchain_core.schema import BaseDocumentTransformer, Document +from langchain_core.document_transformers import BaseDocumentTransformer +from langchain_core.documents import Document from langchain.utils import get_from_env diff --git a/libs/langchain/langchain/document_transformers/doctran_text_translate.py b/libs/langchain/langchain/document_transformers/doctran_text_translate.py index 0b685b311e976..d59c1bea00473 100644 --- a/libs/langchain/langchain/document_transformers/doctran_text_translate.py +++ b/libs/langchain/langchain/document_transformers/doctran_text_translate.py @@ -1,6 +1,7 @@ from typing import Any, Optional, Sequence -from langchain_core.schema import BaseDocumentTransformer, Document +from langchain_core.document_transformers import BaseDocumentTransformer +from langchain_core.documents import Document from langchain.utils import get_from_env diff --git a/libs/langchain/langchain/document_transformers/embeddings_redundant_filter.py b/libs/langchain/langchain/document_transformers/embeddings_redundant_filter.py index e8f29091796bb..0e504881f4dbd 100644 --- 
a/libs/langchain/langchain/document_transformers/embeddings_redundant_filter.py +++ b/libs/langchain/langchain/document_transformers/embeddings_redundant_filter.py @@ -2,9 +2,10 @@ from typing import Any, Callable, List, Sequence import numpy as np +from langchain_core.document_transformers import BaseDocumentTransformer +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema import BaseDocumentTransformer, Document -from langchain_core.schema.embeddings import Embeddings from langchain.utils.math import cosine_similarity diff --git a/libs/langchain/langchain/document_transformers/google_translate.py b/libs/langchain/langchain/document_transformers/google_translate.py index de7f991c0894d..098cbafbbe196 100644 --- a/libs/langchain/langchain/document_transformers/google_translate.py +++ b/libs/langchain/langchain/document_transformers/google_translate.py @@ -1,6 +1,7 @@ from typing import Any, Optional, Sequence -from langchain_core.schema import BaseDocumentTransformer, Document +from langchain_core.document_transformers import BaseDocumentTransformer +from langchain_core.documents import Document from langchain.utilities.vertexai import get_client_info diff --git a/libs/langchain/langchain/document_transformers/html2text.py b/libs/langchain/langchain/document_transformers/html2text.py index 9ee10408b3eef..d8455af288224 100644 --- a/libs/langchain/langchain/document_transformers/html2text.py +++ b/libs/langchain/langchain/document_transformers/html2text.py @@ -1,6 +1,7 @@ from typing import Any, Sequence -from langchain_core.schema import BaseDocumentTransformer, Document +from langchain_core.document_transformers import BaseDocumentTransformer +from langchain_core.documents import Document class Html2TextTransformer(BaseDocumentTransformer): diff --git a/libs/langchain/langchain/document_transformers/long_context_reorder.py b/libs/langchain/langchain/document_transformers/long_context_reorder.py index 97c3bae9cde77..e76027c78c01f 100644 --- a/libs/langchain/langchain/document_transformers/long_context_reorder.py +++ b/libs/langchain/langchain/document_transformers/long_context_reorder.py @@ -1,8 +1,9 @@ """Reorder documents""" from typing import Any, List, Sequence +from langchain_core.document_transformers import BaseDocumentTransformer +from langchain_core.documents import Document from langchain_core.pydantic_v1 import BaseModel -from langchain_core.schema import BaseDocumentTransformer, Document def _litm_reordering(documents: List[Document]) -> List[Document]: diff --git a/libs/langchain/langchain/document_transformers/nuclia_text_transform.py b/libs/langchain/langchain/document_transformers/nuclia_text_transform.py index 2bf4fa856466b..03fc8d07e6c27 100644 --- a/libs/langchain/langchain/document_transformers/nuclia_text_transform.py +++ b/libs/langchain/langchain/document_transformers/nuclia_text_transform.py @@ -3,7 +3,8 @@ import uuid from typing import Any, Sequence -from langchain_core.schema.document import BaseDocumentTransformer, Document +from langchain_core.document_transformers import BaseDocumentTransformer +from langchain_core.documents import Document from langchain.tools.nuclia.tool import NucliaUnderstandingAPI diff --git a/libs/langchain/langchain/document_transformers/openai_functions.py b/libs/langchain/langchain/document_transformers/openai_functions.py index 188583c3c479c..17b10690fe914 100644 --- 
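The document-transformer files in this stretch swap the combined langchain_core.schema import for two modules: BaseDocumentTransformer from langchain_core.document_transformers and Document from langchain_core.documents. A toy subclass, sketched here only to show the relocated base class (the transformer itself is invented):

from typing import Any, Sequence

from langchain_core.document_transformers import BaseDocumentTransformer
from langchain_core.documents import Document


class LowercaseTransformer(BaseDocumentTransformer):
    """Toy example: lowercases page content and keeps metadata unchanged."""

    def transform_documents(
        self, documents: Sequence[Document], **kwargs: Any
    ) -> Sequence[Document]:
        return [
            Document(page_content=d.page_content.lower(), metadata=d.metadata)
            for d in documents
        ]

    async def atransform_documents(
        self, documents: Sequence[Document], **kwargs: Any
    ) -> Sequence[Document]:
        # The async variant simply defers to the sync path in this sketch.
        return self.transform_documents(documents, **kwargs)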
a/libs/langchain/langchain/document_transformers/openai_functions.py +++ b/libs/langchain/langchain/document_transformers/openai_functions.py @@ -1,10 +1,11 @@ """Document transformers that use OpenAI Functions models""" from typing import Any, Dict, Optional, Sequence, Type, Union +from langchain_core.document_transformers import BaseDocumentTransformer +from langchain_core.documents import Document +from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts import ChatPromptTemplate from langchain_core.pydantic_v1 import BaseModel -from langchain_core.schema import BaseDocumentTransformer, Document -from langchain_core.schema.language_model import BaseLanguageModel from langchain.chains.llm import LLMChain from langchain.chains.openai_functions import create_tagging_chain @@ -18,7 +19,7 @@ class OpenAIMetadataTagger(BaseDocumentTransformer, BaseModel): from langchain.chat_models import ChatOpenAI from langchain.document_transformers import OpenAIMetadataTagger - from langchain_core.schema import Document + from langchain_core.documents import Document schema = { "properties": { @@ -101,7 +102,7 @@ def create_metadata_tagger( from langchain.chat_models import ChatOpenAI from langchain.document_transformers import create_metadata_tagger - from langchain_core.schema import Document + from langchain_core.documents import Document schema = { "properties": { diff --git a/libs/langchain/langchain/embeddings/aleph_alpha.py b/libs/langchain/langchain/embeddings/aleph_alpha.py index a28a49c38e636..743c652ecf0b7 100644 --- a/libs/langchain/langchain/embeddings/aleph_alpha.py +++ b/libs/langchain/langchain/embeddings/aleph_alpha.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/awa.py b/libs/langchain/langchain/embeddings/awa.py index 1110537336352..9145d8006a7bd 100644 --- a/libs/langchain/langchain/embeddings/awa.py +++ b/libs/langchain/langchain/embeddings/awa.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema.embeddings import Embeddings class AwaEmbeddings(BaseModel, Embeddings): diff --git a/libs/langchain/langchain/embeddings/baidu_qianfan_endpoint.py b/libs/langchain/langchain/embeddings/baidu_qianfan_endpoint.py index d440cd497a265..6f4f0806ce817 100644 --- a/libs/langchain/langchain/embeddings/baidu_qianfan_endpoint.py +++ b/libs/langchain/langchain/embeddings/baidu_qianfan_endpoint.py @@ -3,8 +3,8 @@ import logging from typing import Any, Dict, List, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/base.py b/libs/langchain/langchain/embeddings/base.py index 60ad5dedbf334..9e648a342eab1 100644 --- a/libs/langchain/langchain/embeddings/base.py +++ b/libs/langchain/langchain/embeddings/base.py @@ -1,4 +1,4 @@ -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings # This is for backwards compatibility __all__ = ["Embeddings"] diff --git 
a/libs/langchain/langchain/embeddings/bedrock.py b/libs/langchain/langchain/embeddings/bedrock.py index 825f14545b5d1..15104c29fc610 100644 --- a/libs/langchain/langchain/embeddings/bedrock.py +++ b/libs/langchain/langchain/embeddings/bedrock.py @@ -4,8 +4,8 @@ from functools import partial from typing import Any, Dict, List, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.embeddings import Embeddings class BedrockEmbeddings(BaseModel, Embeddings): diff --git a/libs/langchain/langchain/embeddings/cache.py b/libs/langchain/langchain/embeddings/cache.py index 621c1d4bc3019..75f1992e867d9 100644 --- a/libs/langchain/langchain/embeddings/cache.py +++ b/libs/langchain/langchain/embeddings/cache.py @@ -14,8 +14,8 @@ from functools import partial from typing import Callable, List, Sequence, Union, cast -from langchain_core.schema import BaseStore -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings +from langchain_core.stores import BaseStore from langchain.storage.encoder_backed import EncoderBackedStore diff --git a/libs/langchain/langchain/embeddings/clarifai.py b/libs/langchain/langchain/embeddings/clarifai.py index 9a3ccac7a2658..805cb04e6aade 100644 --- a/libs/langchain/langchain/embeddings/clarifai.py +++ b/libs/langchain/langchain/embeddings/clarifai.py @@ -1,8 +1,8 @@ import logging from typing import Any, Dict, List, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/cohere.py b/libs/langchain/langchain/embeddings/cohere.py index dd7b74b5cdb9b..0f2f778827ded 100644 --- a/libs/langchain/langchain/embeddings/cohere.py +++ b/libs/langchain/langchain/embeddings/cohere.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/dashscope.py b/libs/langchain/langchain/embeddings/dashscope.py index 60a64fc57cd97..fa79e42efc53d 100644 --- a/libs/langchain/langchain/embeddings/dashscope.py +++ b/libs/langchain/langchain/embeddings/dashscope.py @@ -9,8 +9,8 @@ Optional, ) +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.embeddings import Embeddings from requests.exceptions import HTTPError from tenacity import ( before_sleep_log, diff --git a/libs/langchain/langchain/embeddings/deepinfra.py b/libs/langchain/langchain/embeddings/deepinfra.py index 20d57e5129633..b5ae0e8013e3c 100644 --- a/libs/langchain/langchain/embeddings/deepinfra.py +++ b/libs/langchain/langchain/embeddings/deepinfra.py @@ -1,8 +1,8 @@ from typing import Any, Dict, List, Mapping, Optional import requests +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/edenai.py b/libs/langchain/langchain/embeddings/edenai.py index 
da5869e0480b0..1faadd82e513f 100644 --- a/libs/langchain/langchain/embeddings/edenai.py +++ b/libs/langchain/langchain/embeddings/edenai.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.utilities.requests import Requests from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/elasticsearch.py b/libs/langchain/langchain/embeddings/elasticsearch.py index fb0db428597fc..d34daa1e707f6 100644 --- a/libs/langchain/langchain/embeddings/elasticsearch.py +++ b/libs/langchain/langchain/embeddings/elasticsearch.py @@ -8,7 +8,7 @@ from elasticsearch import Elasticsearch from elasticsearch.client import MlClient -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings class ElasticsearchEmbeddings(Embeddings): diff --git a/libs/langchain/langchain/embeddings/embaas.py b/libs/langchain/langchain/embeddings/embaas.py index 3f054764fa11e..df7696467e776 100644 --- a/libs/langchain/langchain/embeddings/embaas.py +++ b/libs/langchain/langchain/embeddings/embaas.py @@ -1,8 +1,8 @@ from typing import Any, Dict, List, Mapping, Optional import requests +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.embeddings import Embeddings from typing_extensions import NotRequired, TypedDict from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/ernie.py b/libs/langchain/langchain/embeddings/ernie.py index 9c147b00b4db8..1c9a0c76c5006 100644 --- a/libs/langchain/langchain/embeddings/ernie.py +++ b/libs/langchain/langchain/embeddings/ernie.py @@ -5,8 +5,8 @@ from typing import Dict, List, Optional import requests +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/fake.py b/libs/langchain/langchain/embeddings/fake.py index 649aa93c36708..fcc33496aa863 100644 --- a/libs/langchain/langchain/embeddings/fake.py +++ b/libs/langchain/langchain/embeddings/fake.py @@ -2,8 +2,8 @@ from typing import List import numpy as np +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel -from langchain_core.schema.embeddings import Embeddings class FakeEmbeddings(Embeddings, BaseModel): diff --git a/libs/langchain/langchain/embeddings/fastembed.py b/libs/langchain/langchain/embeddings/fastembed.py index ed97526fdeff6..ca4a989f1d6d9 100644 --- a/libs/langchain/langchain/embeddings/fastembed.py +++ b/libs/langchain/langchain/embeddings/fastembed.py @@ -1,8 +1,8 @@ from typing import Any, Dict, List, Literal, Optional import numpy as np +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.embeddings import Embeddings class FastEmbedEmbeddings(BaseModel, Embeddings): diff --git a/libs/langchain/langchain/embeddings/google_palm.py b/libs/langchain/langchain/embeddings/google_palm.py index afb38763a8e74..a61dbd3b2997f 100644 --- a/libs/langchain/langchain/embeddings/google_palm.py +++ b/libs/langchain/langchain/embeddings/google_palm.py @@ -3,8 +3,8 @@ 
import logging from typing import Any, Callable, Dict, List, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema.embeddings import Embeddings from tenacity import ( before_sleep_log, retry, diff --git a/libs/langchain/langchain/embeddings/gpt4all.py b/libs/langchain/langchain/embeddings/gpt4all.py index e0572f60cad1b..b11fc1841c8e5 100644 --- a/libs/langchain/langchain/embeddings/gpt4all.py +++ b/libs/langchain/langchain/embeddings/gpt4all.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema.embeddings import Embeddings class GPT4AllEmbeddings(BaseModel, Embeddings): diff --git a/libs/langchain/langchain/embeddings/gradient_ai.py b/libs/langchain/langchain/embeddings/gradient_ai.py index 290b0d1219e27..935584190fd16 100644 --- a/libs/langchain/langchain/embeddings/gradient_ai.py +++ b/libs/langchain/langchain/embeddings/gradient_ai.py @@ -7,8 +7,8 @@ import aiohttp import numpy as np import requests +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/huggingface.py b/libs/langchain/langchain/embeddings/huggingface.py index 81ce689d8bcb2..823e1902a3c15 100644 --- a/libs/langchain/langchain/embeddings/huggingface.py +++ b/libs/langchain/langchain/embeddings/huggingface.py @@ -1,8 +1,8 @@ from typing import Any, Dict, List, Optional import requests +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, Field -from langchain_core.schema.embeddings import Embeddings DEFAULT_MODEL_NAME = "sentence-transformers/all-mpnet-base-v2" DEFAULT_INSTRUCT_MODEL = "hkunlp/instructor-large" diff --git a/libs/langchain/langchain/embeddings/huggingface_hub.py b/libs/langchain/langchain/embeddings/huggingface_hub.py index 2c14614eea797..c864d673d91c4 100644 --- a/libs/langchain/langchain/embeddings/huggingface_hub.py +++ b/libs/langchain/langchain/embeddings/huggingface_hub.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/javelin_ai_gateway.py b/libs/langchain/langchain/embeddings/javelin_ai_gateway.py index 871b1838b10ad..83d0390f88ad5 100644 --- a/libs/langchain/langchain/embeddings/javelin_ai_gateway.py +++ b/libs/langchain/langchain/embeddings/javelin_ai_gateway.py @@ -2,8 +2,8 @@ from typing import Any, Iterator, List, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel -from langchain_core.schema.embeddings import Embeddings def _chunk(texts: List[str], size: int) -> Iterator[List[str]]: diff --git a/libs/langchain/langchain/embeddings/jina.py b/libs/langchain/langchain/embeddings/jina.py index c94728cb4f5da..a781f908f2d38 100644 --- a/libs/langchain/langchain/embeddings/jina.py +++ b/libs/langchain/langchain/embeddings/jina.py @@ -2,8 +2,8 @@ from typing import Any, Dict, List, Optional import requests +from langchain_core.embeddings import Embeddings 
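Every embeddings provider in this section gets the same treatment: the Embeddings interface moves from langchain_core.schema.embeddings to langchain_core.embeddings. A minimal implementation of the relocated interface, purely for illustration (the constant vectors are obviously not real embeddings):

from typing import List

from langchain_core.embeddings import Embeddings


class ConstantEmbeddings(Embeddings):
    """Toy Embeddings implementation returning fixed-size zero vectors."""

    def __init__(self, size: int = 8) -> None:
        self.size = size

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        # Sketch only: real providers call out to a model or an API here.
        return [[0.0] * self.size for _ in texts]

    def embed_query(self, text: str) -> List[float]:
        return [0.0] * self.size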
from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/llamacpp.py b/libs/langchain/langchain/embeddings/llamacpp.py index b2dbda8a42cfa..63e6c7f77b828 100644 --- a/libs/langchain/langchain/embeddings/llamacpp.py +++ b/libs/langchain/langchain/embeddings/llamacpp.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator -from langchain_core.schema.embeddings import Embeddings class LlamaCppEmbeddings(BaseModel, Embeddings): diff --git a/libs/langchain/langchain/embeddings/llm_rails.py b/libs/langchain/langchain/embeddings/llm_rails.py index 804f8224aa760..6d4fbf46e620b 100644 --- a/libs/langchain/langchain/embeddings/llm_rails.py +++ b/libs/langchain/langchain/embeddings/llm_rails.py @@ -4,8 +4,8 @@ from typing import List, Optional import requests +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra -from langchain_core.schema.embeddings import Embeddings class LLMRailsEmbeddings(BaseModel, Embeddings): diff --git a/libs/langchain/langchain/embeddings/localai.py b/libs/langchain/langchain/embeddings/localai.py index 0f0e9ac1c929e..4a35d9e83c69f 100644 --- a/libs/langchain/langchain/embeddings/localai.py +++ b/libs/langchain/langchain/embeddings/localai.py @@ -15,8 +15,8 @@ Union, ) +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain_core.utils import get_pydantic_field_names from tenacity import ( AsyncRetrying, diff --git a/libs/langchain/langchain/embeddings/minimax.py b/libs/langchain/langchain/embeddings/minimax.py index 3bc2840cf136d..6c239a9398fca 100644 --- a/libs/langchain/langchain/embeddings/minimax.py +++ b/libs/langchain/langchain/embeddings/minimax.py @@ -4,8 +4,8 @@ from typing import Any, Callable, Dict, List, Optional import requests +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.embeddings import Embeddings from tenacity import ( before_sleep_log, retry, diff --git a/libs/langchain/langchain/embeddings/mlflow_gateway.py b/libs/langchain/langchain/embeddings/mlflow_gateway.py index 7375bd71d3556..56f5eee1bb49f 100644 --- a/libs/langchain/langchain/embeddings/mlflow_gateway.py +++ b/libs/langchain/langchain/embeddings/mlflow_gateway.py @@ -2,8 +2,8 @@ from typing import Any, Iterator, List, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel -from langchain_core.schema.embeddings import Embeddings def _chunk(texts: List[str], size: int) -> Iterator[List[str]]: diff --git a/libs/langchain/langchain/embeddings/modelscope_hub.py b/libs/langchain/langchain/embeddings/modelscope_hub.py index 23e72da5ab8f7..143eb09270b00 100644 --- a/libs/langchain/langchain/embeddings/modelscope_hub.py +++ b/libs/langchain/langchain/embeddings/modelscope_hub.py @@ -1,7 +1,7 @@ from typing import Any, List, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra -from langchain_core.schema.embeddings import Embeddings class ModelScopeEmbeddings(BaseModel, Embeddings): diff --git 
a/libs/langchain/langchain/embeddings/mosaicml.py b/libs/langchain/langchain/embeddings/mosaicml.py index 72f8c341d1c91..edf4eec4de81c 100644 --- a/libs/langchain/langchain/embeddings/mosaicml.py +++ b/libs/langchain/langchain/embeddings/mosaicml.py @@ -1,8 +1,8 @@ from typing import Any, Dict, List, Mapping, Optional, Tuple import requests +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/nlpcloud.py b/libs/langchain/langchain/embeddings/nlpcloud.py index 38b44f7975e41..540d952bef408 100644 --- a/libs/langchain/langchain/embeddings/nlpcloud.py +++ b/libs/langchain/langchain/embeddings/nlpcloud.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/octoai_embeddings.py b/libs/langchain/langchain/embeddings/octoai_embeddings.py index 286173054fbad..26f12f46e2b39 100644 --- a/libs/langchain/langchain/embeddings/octoai_embeddings.py +++ b/libs/langchain/langchain/embeddings/octoai_embeddings.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Mapping, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/embeddings/ollama.py b/libs/langchain/langchain/embeddings/ollama.py index d254677361ea5..1938d91640546 100644 --- a/libs/langchain/langchain/embeddings/ollama.py +++ b/libs/langchain/langchain/embeddings/ollama.py @@ -1,8 +1,8 @@ from typing import Any, Dict, List, Mapping, Optional import requests +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra -from langchain_core.schema.embeddings import Embeddings class OllamaEmbeddings(BaseModel, Embeddings): diff --git a/libs/langchain/langchain/embeddings/openai.py b/libs/langchain/langchain/embeddings/openai.py index 60d55b1c1880f..d10da4b4e9713 100644 --- a/libs/langchain/langchain/embeddings/openai.py +++ b/libs/langchain/langchain/embeddings/openai.py @@ -20,8 +20,8 @@ ) import numpy as np +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain_core.utils import get_pydantic_field_names from packaging.version import Version, parse from tenacity import ( diff --git a/libs/langchain/langchain/embeddings/sagemaker_endpoint.py b/libs/langchain/langchain/embeddings/sagemaker_endpoint.py index d1f4fe775f130..4054b343da5e8 100644 --- a/libs/langchain/langchain/embeddings/sagemaker_endpoint.py +++ b/libs/langchain/langchain/embeddings/sagemaker_endpoint.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Optional +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.llms.sagemaker_endpoint import ContentHandlerBase diff --git a/libs/langchain/langchain/embeddings/self_hosted.py 
b/libs/langchain/langchain/embeddings/self_hosted.py index 5889999160f0b..4fe6bc5a38f1c 100644 --- a/libs/langchain/langchain/embeddings/self_hosted.py +++ b/libs/langchain/langchain/embeddings/self_hosted.py @@ -1,7 +1,7 @@ from typing import Any, Callable, List +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import Extra -from langchain_core.schema.embeddings import Embeddings from langchain.llms.self_hosted import SelfHostedPipeline diff --git a/libs/langchain/langchain/embeddings/spacy_embeddings.py b/libs/langchain/langchain/embeddings/spacy_embeddings.py index 460cba90c3162..eb581d738491a 100644 --- a/libs/langchain/langchain/embeddings/spacy_embeddings.py +++ b/libs/langchain/langchain/embeddings/spacy_embeddings.py @@ -1,8 +1,8 @@ import importlib.util from typing import Any, Dict, List +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.embeddings import Embeddings class SpacyEmbeddings(BaseModel, Embeddings): diff --git a/libs/langchain/langchain/embeddings/tensorflow_hub.py b/libs/langchain/langchain/embeddings/tensorflow_hub.py index 918bcd0d412a5..0addb2ace81b9 100644 --- a/libs/langchain/langchain/embeddings/tensorflow_hub.py +++ b/libs/langchain/langchain/embeddings/tensorflow_hub.py @@ -1,7 +1,7 @@ from typing import Any, List +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra -from langchain_core.schema.embeddings import Embeddings DEFAULT_MODEL_URL = "https://tfhub.dev/google/universal-sentence-encoder-multilingual/3" diff --git a/libs/langchain/langchain/embeddings/vertexai.py b/libs/langchain/langchain/embeddings/vertexai.py index de1206fb7ae24..54eb164a34ac7 100644 --- a/libs/langchain/langchain/embeddings/vertexai.py +++ b/libs/langchain/langchain/embeddings/vertexai.py @@ -1,7 +1,7 @@ from typing import Dict, List +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema.embeddings import Embeddings from langchain.llms.vertexai import _VertexAICommon from langchain.utilities.vertexai import raise_vertex_import_error diff --git a/libs/langchain/langchain/embeddings/voyageai.py b/libs/langchain/langchain/embeddings/voyageai.py index 3f07a67bd30ad..85f2b5d413707 100644 --- a/libs/langchain/langchain/embeddings/voyageai.py +++ b/libs/langchain/langchain/embeddings/voyageai.py @@ -14,8 +14,8 @@ ) import requests +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr, root_validator -from langchain_core.schema.embeddings import Embeddings from langchain_core.utils import convert_to_secret_str from tenacity import ( before_sleep_log, diff --git a/libs/langchain/langchain/embeddings/xinference.py b/libs/langchain/langchain/embeddings/xinference.py index d56bc622d006a..aab938e5506aa 100644 --- a/libs/langchain/langchain/embeddings/xinference.py +++ b/libs/langchain/langchain/embeddings/xinference.py @@ -1,7 +1,7 @@ """Wrapper around Xinference embedding models.""" from typing import Any, List, Optional -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings class XinferenceEmbeddings(Embeddings): diff --git a/libs/langchain/langchain/evaluation/agents/trajectory_eval_chain.py b/libs/langchain/langchain/evaluation/agents/trajectory_eval_chain.py index 3ae5d21d42dd5..8e6f924715fa2 100644 --- 
a/libs/langchain/langchain/evaluation/agents/trajectory_eval_chain.py +++ b/libs/langchain/langchain/evaluation/agents/trajectory_eval_chain.py @@ -18,9 +18,11 @@ cast, ) +from langchain_core.agents import AgentAction +from langchain_core.exceptions import OutputParserException +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser from langchain_core.pydantic_v1 import Extra, Field -from langchain_core.schema import AgentAction, BaseOutputParser, OutputParserException -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import ( AsyncCallbackManagerForChainRun, diff --git a/libs/langchain/langchain/evaluation/agents/trajectory_eval_prompt.py b/libs/langchain/langchain/evaluation/agents/trajectory_eval_prompt.py index 9037a64aeeebb..03ec89f5e4064 100644 --- a/libs/langchain/langchain/evaluation/agents/trajectory_eval_prompt.py +++ b/libs/langchain/langchain/evaluation/agents/trajectory_eval_prompt.py @@ -1,6 +1,6 @@ """Prompt for trajectory evaluation chain.""" # flake8: noqa -from langchain_core.schema.messages import HumanMessage, AIMessage, SystemMessage +from langchain_core.messages import HumanMessage, AIMessage, SystemMessage from langchain_core.prompts.chat import ( ChatPromptTemplate, diff --git a/libs/langchain/langchain/evaluation/comparison/eval_chain.py b/libs/langchain/langchain/evaluation/comparison/eval_chain.py index 1f06af3851e8b..65f8fc939a8f8 100644 --- a/libs/langchain/langchain/evaluation/comparison/eval_chain.py +++ b/libs/langchain/langchain/evaluation/comparison/eval_chain.py @@ -5,10 +5,10 @@ import re from typing import Any, Dict, List, Optional, Union +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser from langchain_core.prompts.prompt import PromptTemplate from langchain_core.pydantic_v1 import Extra, Field -from langchain_core.schema import RUN_KEY, BaseOutputParser -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import Callbacks from langchain.chains.constitutional_ai.models import ConstitutionalPrinciple @@ -25,6 +25,7 @@ Criteria, ) from langchain.evaluation.schema import LLMEvalChain, PairwiseStringEvaluator +from langchain.schema import RUN_KEY logger = logging.getLogger(__name__) diff --git a/libs/langchain/langchain/evaluation/criteria/eval_chain.py b/libs/langchain/langchain/evaluation/criteria/eval_chain.py index f2e0476f79dfe..ac3bb1211c8b0 100644 --- a/libs/langchain/langchain/evaluation/criteria/eval_chain.py +++ b/libs/langchain/langchain/evaluation/criteria/eval_chain.py @@ -4,15 +4,17 @@ from enum import Enum from typing import Any, Dict, List, Mapping, Optional, Union +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Extra, Field -from langchain_core.schema import RUN_KEY, BaseOutputParser, BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import Callbacks from langchain.chains.constitutional_ai.models import ConstitutionalPrinciple from langchain.chains.llm import LLMChain from langchain.evaluation.criteria.prompt import PROMPT, PROMPT_WITH_REFERENCES from langchain.evaluation.schema import LLMEvalChain, StringEvaluator +from langchain.schema import RUN_KEY class Criteria(str, 
Enum): diff --git a/libs/langchain/langchain/evaluation/embedding_distance/base.py b/libs/langchain/langchain/evaluation/embedding_distance/base.py index 182246085dd6a..84340ed1b1142 100644 --- a/libs/langchain/langchain/evaluation/embedding_distance/base.py +++ b/libs/langchain/langchain/evaluation/embedding_distance/base.py @@ -3,9 +3,8 @@ from typing import Any, Dict, List, Optional import numpy as np +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema import RUN_KEY -from langchain_core.schema.embeddings import Embeddings from langchain.callbacks.manager import ( AsyncCallbackManagerForChainRun, @@ -15,6 +14,7 @@ from langchain.chains.base import Chain from langchain.embeddings.openai import OpenAIEmbeddings from langchain.evaluation.schema import PairwiseStringEvaluator, StringEvaluator +from langchain.schema import RUN_KEY from langchain.utils.math import cosine_similarity diff --git a/libs/langchain/langchain/evaluation/loading.py b/libs/langchain/langchain/evaluation/loading.py index b2d8b63e4442b..54e041dc19ade 100644 --- a/libs/langchain/langchain/evaluation/loading.py +++ b/libs/langchain/langchain/evaluation/loading.py @@ -1,7 +1,7 @@ """Loading datasets and evaluators.""" from typing import Any, Dict, List, Optional, Sequence, Type, Union -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langchain.chains.base import Chain from langchain.chat_models.openai import ChatOpenAI diff --git a/libs/langchain/langchain/evaluation/qa/eval_chain.py b/libs/langchain/langchain/evaluation/qa/eval_chain.py index 369c976863b5a..ae63aa343f8c1 100644 --- a/libs/langchain/langchain/evaluation/qa/eval_chain.py +++ b/libs/langchain/langchain/evaluation/qa/eval_chain.py @@ -5,15 +5,15 @@ import string from typing import Any, List, Optional, Sequence, Tuple +from langchain_core.language_models import BaseLanguageModel from langchain_core.prompts import PromptTemplate from langchain_core.pydantic_v1 import Extra -from langchain_core.schema import RUN_KEY -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import Callbacks from langchain.chains.llm import LLMChain from langchain.evaluation.qa.eval_prompt import CONTEXT_PROMPT, COT_PROMPT, PROMPT from langchain.evaluation.schema import LLMEvalChain, StringEvaluator +from langchain.schema import RUN_KEY def _get_score(text: str) -> Optional[Tuple[str, int]]: diff --git a/libs/langchain/langchain/evaluation/qa/generate_chain.py b/libs/langchain/langchain/evaluation/qa/generate_chain.py index 90588fee2e534..46854fae00642 100644 --- a/libs/langchain/langchain/evaluation/qa/generate_chain.py +++ b/libs/langchain/langchain/evaluation/qa/generate_chain.py @@ -3,9 +3,9 @@ from typing import Any +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseLLMOutputParser from langchain_core.pydantic_v1 import Field -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.output_parser import BaseLLMOutputParser from langchain.chains.llm import LLMChain from langchain.evaluation.qa.generate_prompt import PROMPT diff --git a/libs/langchain/langchain/evaluation/schema.py b/libs/langchain/langchain/evaluation/schema.py index 95140a2ef1b7c..bb9d459344341 100644 --- a/libs/langchain/langchain/evaluation/schema.py +++ b/libs/langchain/langchain/evaluation/schema.py 
@@ -9,8 +9,8 @@ from typing import Any, Optional, Sequence, Tuple, Union from warnings import warn -from langchain_core.schema.agent import AgentAction -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.agents import AgentAction +from langchain_core.language_models import BaseLanguageModel from langchain.chains.base import Chain diff --git a/libs/langchain/langchain/evaluation/scoring/eval_chain.py b/libs/langchain/langchain/evaluation/scoring/eval_chain.py index ecf00495d8288..3f7449bb0dc28 100644 --- a/libs/langchain/langchain/evaluation/scoring/eval_chain.py +++ b/libs/langchain/langchain/evaluation/scoring/eval_chain.py @@ -5,10 +5,10 @@ import re from typing import Any, Dict, List, Optional, Union +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser from langchain_core.prompts.prompt import PromptTemplate from langchain_core.pydantic_v1 import Extra, Field -from langchain_core.schema import RUN_KEY, BaseOutputParser -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import Callbacks from langchain.chains.constitutional_ai.models import ConstitutionalPrinciple @@ -26,6 +26,7 @@ SCORING_TEMPLATE, SCORING_TEMPLATE_WITH_REFERENCE, ) +from langchain.schema import RUN_KEY logger = logging.getLogger(__name__) diff --git a/libs/langchain/langchain/evaluation/string_distance/base.py b/libs/langchain/langchain/evaluation/string_distance/base.py index 07940c644d086..92f8632950416 100644 --- a/libs/langchain/langchain/evaluation/string_distance/base.py +++ b/libs/langchain/langchain/evaluation/string_distance/base.py @@ -4,7 +4,6 @@ from typing import Any, Callable, Dict, List, Optional from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema import RUN_KEY from langchain.callbacks.manager import ( AsyncCallbackManagerForChainRun, @@ -13,6 +12,7 @@ ) from langchain.chains.base import Chain from langchain.evaluation.schema import PairwiseStringEvaluator, StringEvaluator +from langchain.schema import RUN_KEY def _load_rapidfuzz() -> Any: diff --git a/libs/langchain/langchain/globals/__init__.py b/libs/langchain/langchain/globals/__init__.py index 883ddb7ca47ca..77aa6662da872 100644 --- a/libs/langchain/langchain/globals/__init__.py +++ b/libs/langchain/langchain/globals/__init__.py @@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, Optional if TYPE_CHECKING: - from langchain_core.schema import BaseCache + from langchain_core.caches import BaseCache # DO NOT USE THESE VALUES DIRECTLY! 
diff --git a/libs/langchain/langchain/graphs/graph_document.py b/libs/langchain/langchain/graphs/graph_document.py index 00625f1d75f6b..3e9a597cc565f 100644 --- a/libs/langchain/langchain/graphs/graph_document.py +++ b/libs/langchain/langchain/graphs/graph_document.py @@ -2,9 +2,9 @@ from typing import List, Union +from langchain_core.documents import Document from langchain_core.load.serializable import Serializable from langchain_core.pydantic_v1 import Field -from langchain_core.schema import Document class Node(Serializable): diff --git a/libs/langchain/langchain/indexes/_api.py b/libs/langchain/langchain/indexes/_api.py index 24681e44003e7..3b70ec111e98a 100644 --- a/libs/langchain/langchain/indexes/_api.py +++ b/libs/langchain/langchain/indexes/_api.py @@ -24,9 +24,9 @@ cast, ) +from langchain_core.documents import Document from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import Document -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore from langchain.document_loaders.base import BaseLoader from langchain.indexes.base import NAMESPACE_UUID, RecordManager diff --git a/libs/langchain/langchain/indexes/graph.py b/libs/langchain/langchain/indexes/graph.py index 7fe485200734b..a1ded5b7bd4be 100644 --- a/libs/langchain/langchain/indexes/graph.py +++ b/libs/langchain/langchain/indexes/graph.py @@ -1,9 +1,9 @@ """Graph Index Creator.""" from typing import Optional, Type +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import BaseModel -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.prompt_template import BasePromptTemplate from langchain.chains.llm import LLMChain from langchain.graphs.networkx_graph import NetworkxEntityGraph, parse_triples diff --git a/libs/langchain/langchain/indexes/vectorstore.py b/libs/langchain/langchain/indexes/vectorstore.py index 940dceef37333..4937a44a109a9 100644 --- a/libs/langchain/langchain/indexes/vectorstore.py +++ b/libs/langchain/langchain/indexes/vectorstore.py @@ -1,10 +1,10 @@ from typing import Any, Dict, List, Optional, Type +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import BaseModel, Extra, Field -from langchain_core.schema import Document -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore from langchain.chains.qa_with_sources.retrieval import RetrievalQAWithSourcesChain from langchain.chains.retrieval_qa.base import RetrievalQA diff --git a/libs/langchain/langchain/llms/anthropic.py b/libs/langchain/langchain/llms/anthropic.py index b93edd2647618..ecf0df0256877 100644 --- a/libs/langchain/langchain/llms/anthropic.py +++ b/libs/langchain/langchain/llms/anthropic.py @@ -11,10 +11,10 @@ Optional, ) +from langchain_core.language_models import BaseLanguageModel +from langchain_core.outputs import GenerationChunk +from langchain_core.prompts import PromptValue from langchain_core.pydantic_v1 import Field, SecretStr, root_validator -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.output import GenerationChunk -from langchain_core.schema.prompt 
import PromptValue from langchain_core.utils import ( check_package_version, get_pydantic_field_names, diff --git a/libs/langchain/langchain/llms/anyscale.py b/libs/langchain/langchain/llms/anyscale.py index 2ffeab409877e..e2f26463ecd95 100644 --- a/libs/langchain/langchain/llms/anyscale.py +++ b/libs/langchain/langchain/llms/anyscale.py @@ -12,9 +12,8 @@ cast, ) +from langchain_core.outputs import Generation, GenerationChunk, LLMResult from langchain_core.pydantic_v1 import Field, SecretStr, root_validator -from langchain_core.schema import Generation, LLMResult -from langchain_core.schema.output import GenerationChunk from langchain_core.utils import convert_to_secret_str from langchain.callbacks.manager import ( diff --git a/libs/langchain/langchain/llms/baidu_qianfan_endpoint.py b/libs/langchain/langchain/llms/baidu_qianfan_endpoint.py index 69f5538739d46..8a23a7d7d09fd 100644 --- a/libs/langchain/langchain/llms/baidu_qianfan_endpoint.py +++ b/libs/langchain/langchain/llms/baidu_qianfan_endpoint.py @@ -10,8 +10,8 @@ Optional, ) +from langchain_core.outputs import GenerationChunk from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema.output import GenerationChunk from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/llms/base.py b/libs/langchain/langchain/llms/base.py index a6ffab2a1868e..f6739068649b9 100644 --- a/libs/langchain/langchain/llms/base.py +++ b/libs/langchain/langchain/llms/base.py @@ -1,4 +1,4 @@ -from langchain_core.llm import ( +from langchain_core.language_models.llms import ( LLM, BaseLLM, create_base_retry_decorator, diff --git a/libs/langchain/langchain/llms/bedrock.py b/libs/langchain/langchain/llms/bedrock.py index 7a1dd1b0a09a3..3238c7c723c65 100644 --- a/libs/langchain/langchain/llms/bedrock.py +++ b/libs/langchain/langchain/llms/bedrock.py @@ -3,8 +3,8 @@ from abc import ABC from typing import Any, Dict, Iterator, List, Mapping, Optional +from langchain_core.outputs import GenerationChunk from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator -from langchain_core.schema.output import GenerationChunk from langchain.callbacks.manager import CallbackManagerForLLMRun from langchain.llms.base import LLM diff --git a/libs/langchain/langchain/llms/clarifai.py b/libs/langchain/langchain/llms/clarifai.py index 6da1851a70349..632d45fe06c45 100644 --- a/libs/langchain/langchain/llms/clarifai.py +++ b/libs/langchain/langchain/llms/clarifai.py @@ -1,8 +1,8 @@ import logging from typing import Any, Dict, List, Optional +from langchain_core.outputs import Generation, LLMResult from langchain_core.pydantic_v1 import Extra, root_validator -from langchain_core.schema import Generation, LLMResult from langchain.callbacks.manager import CallbackManagerForLLMRun from langchain.llms.base import LLM diff --git a/libs/langchain/langchain/llms/ctranslate2.py b/libs/langchain/langchain/llms/ctranslate2.py index 060c3d3615882..940fc844127e0 100644 --- a/libs/langchain/langchain/llms/ctranslate2.py +++ b/libs/langchain/langchain/llms/ctranslate2.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Optional, Union +from langchain_core.outputs import Generation, LLMResult from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema.output import Generation, LLMResult from langchain.callbacks.manager import CallbackManagerForLLMRun from langchain.llms.base import BaseLLM diff --git a/libs/langchain/langchain/llms/deepsparse.py 
b/libs/langchain/langchain/llms/deepsparse.py index 24f1d0e6cd299..362492a8564ba 100644 --- a/libs/langchain/langchain/llms/deepsparse.py +++ b/libs/langchain/langchain/llms/deepsparse.py @@ -7,7 +7,7 @@ ) from langchain.llms.base import LLM from langchain.llms.utils import enforce_stop_tokens -from langchain_core.schema.output import GenerationChunk +from langchain_core.outputs import GenerationChunk class DeepSparse(LLM): diff --git a/libs/langchain/langchain/llms/fake.py b/libs/langchain/langchain/llms/fake.py index f77f919e6c92b..eb4413dff1fa1 100644 --- a/libs/langchain/langchain/llms/fake.py +++ b/libs/langchain/langchain/llms/fake.py @@ -2,8 +2,8 @@ import time from typing import Any, AsyncIterator, Iterator, List, Mapping, Optional +from langchain_core.language_models import LanguageModelInput from langchain_core.runnables import RunnableConfig -from langchain_core.schema.language_model import LanguageModelInput from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/llms/fireworks.py b/libs/langchain/langchain/llms/fireworks.py index 737d691da8aaf..7ba2be0fb7d89 100644 --- a/libs/langchain/langchain/llms/fireworks.py +++ b/libs/langchain/langchain/llms/fireworks.py @@ -2,8 +2,8 @@ from concurrent.futures import ThreadPoolExecutor from typing import Any, AsyncIterator, Callable, Dict, Iterator, List, Optional, Union +from langchain_core.outputs import Generation, GenerationChunk, LLMResult from langchain_core.pydantic_v1 import Field, SecretStr, root_validator -from langchain_core.schema.output import Generation, GenerationChunk, LLMResult from langchain_core.utils import convert_to_secret_str from langchain.callbacks.manager import ( diff --git a/libs/langchain/langchain/llms/gigachat.py b/libs/langchain/langchain/llms/gigachat.py index 5ea544885c2e4..a1b296f1d0e59 100644 --- a/libs/langchain/langchain/llms/gigachat.py +++ b/libs/langchain/langchain/llms/gigachat.py @@ -5,8 +5,8 @@ from typing import Any, AsyncIterator, Dict, Iterator, List, Optional from langchain_core.load.serializable import Serializable +from langchain_core.outputs import Generation, GenerationChunk, LLMResult from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema.output import Generation, GenerationChunk, LLMResult from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/llms/google_palm.py b/libs/langchain/langchain/llms/google_palm.py index f3aadc3a0d33c..1e40143c89790 100644 --- a/libs/langchain/langchain/llms/google_palm.py +++ b/libs/langchain/langchain/llms/google_palm.py @@ -3,8 +3,8 @@ import logging from typing import Any, Callable, Dict, List, Optional +from langchain_core.outputs import Generation, LLMResult from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema import Generation, LLMResult from tenacity import ( before_sleep_log, retry, diff --git a/libs/langchain/langchain/llms/gradient_ai.py b/libs/langchain/langchain/llms/gradient_ai.py index 3ee979aee3559..eeb8c7b739df1 100644 --- a/libs/langchain/langchain/llms/gradient_ai.py +++ b/libs/langchain/langchain/llms/gradient_ai.py @@ -5,8 +5,8 @@ import aiohttp import requests +from langchain_core.outputs import Generation, LLMResult from langchain_core.pydantic_v1 import Extra, Field, root_validator -from langchain_core.schema import Generation, LLMResult from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git 
a/libs/langchain/langchain/llms/huggingface_pipeline.py b/libs/langchain/langchain/llms/huggingface_pipeline.py index 095f9b186f630..d0abc9f449c3a 100644 --- a/libs/langchain/langchain/llms/huggingface_pipeline.py +++ b/libs/langchain/langchain/llms/huggingface_pipeline.py @@ -4,8 +4,8 @@ import logging from typing import Any, List, Mapping, Optional +from langchain_core.outputs import Generation, LLMResult from langchain_core.pydantic_v1 import Extra -from langchain_core.schema import Generation, LLMResult from langchain.callbacks.manager import CallbackManagerForLLMRun from langchain.llms.base import BaseLLM diff --git a/libs/langchain/langchain/llms/huggingface_text_gen_inference.py b/libs/langchain/langchain/llms/huggingface_text_gen_inference.py index 15267810252f6..5caa376158451 100644 --- a/libs/langchain/langchain/llms/huggingface_text_gen_inference.py +++ b/libs/langchain/langchain/llms/huggingface_text_gen_inference.py @@ -1,8 +1,8 @@ import logging from typing import Any, AsyncIterator, Dict, Iterator, List, Optional +from langchain_core.outputs import GenerationChunk from langchain_core.pydantic_v1 import Extra, Field, root_validator -from langchain_core.schema.output import GenerationChunk from langchain_core.utils import get_pydantic_field_names from langchain.callbacks.manager import ( diff --git a/libs/langchain/langchain/llms/llamacpp.py b/libs/langchain/langchain/llms/llamacpp.py index 8da36d325b14e..be484ccb0f08c 100644 --- a/libs/langchain/langchain/llms/llamacpp.py +++ b/libs/langchain/langchain/llms/llamacpp.py @@ -4,8 +4,8 @@ from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Union +from langchain_core.outputs import GenerationChunk from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema.output import GenerationChunk from langchain_core.utils import get_pydantic_field_names from langchain_core.utils.utils import build_extra_kwargs diff --git a/libs/langchain/langchain/llms/ollama.py b/libs/langchain/langchain/llms/ollama.py index 3e6232f4397c4..e212e3b012ca7 100644 --- a/libs/langchain/langchain/llms/ollama.py +++ b/libs/langchain/langchain/llms/ollama.py @@ -2,10 +2,9 @@ from typing import Any, Dict, Iterator, List, Mapping, Optional import requests +from langchain_core.language_models import BaseLanguageModel +from langchain_core.outputs import GenerationChunk, LLMResult from langchain_core.pydantic_v1 import Extra -from langchain_core.schema import LLMResult -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.output import GenerationChunk from langchain.callbacks.manager import CallbackManagerForLLMRun from langchain.llms.base import BaseLLM diff --git a/libs/langchain/langchain/llms/opaqueprompts.py b/libs/langchain/langchain/llms/opaqueprompts.py index 9a58819045409..f201f9c00b392 100644 --- a/libs/langchain/langchain/llms/opaqueprompts.py +++ b/libs/langchain/langchain/llms/opaqueprompts.py @@ -1,8 +1,8 @@ import logging from typing import Any, Dict, List, Optional +from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import Extra, root_validator -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import CallbackManagerForLLMRun from langchain.llms.base import LLM diff --git a/libs/langchain/langchain/llms/openai.py b/libs/langchain/langchain/llms/openai.py index 1a0439709c7a5..508beb681c856 100644 --- a/libs/langchain/langchain/llms/openai.py +++ 
b/libs/langchain/langchain/llms/openai.py @@ -21,9 +21,8 @@ Union, ) +from langchain_core.outputs import Generation, GenerationChunk, LLMResult from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema import Generation, LLMResult -from langchain_core.schema.output import GenerationChunk from langchain_core.utils import get_pydantic_field_names from langchain_core.utils.utils import build_extra_kwargs diff --git a/libs/langchain/langchain/llms/pai_eas_endpoint.py b/libs/langchain/langchain/llms/pai_eas_endpoint.py index 4e4bd7cde03bf..d7447b738ddbf 100644 --- a/libs/langchain/langchain/llms/pai_eas_endpoint.py +++ b/libs/langchain/langchain/llms/pai_eas_endpoint.py @@ -3,8 +3,8 @@ from typing import Any, Dict, Iterator, List, Mapping, Optional import requests +from langchain_core.outputs import GenerationChunk from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema.output import GenerationChunk from langchain.callbacks.manager import CallbackManagerForLLMRun from langchain.llms.base import LLM diff --git a/libs/langchain/langchain/llms/promptlayer_openai.py b/libs/langchain/langchain/llms/promptlayer_openai.py index 35434481268da..b8037cdbfbe7d 100644 --- a/libs/langchain/langchain/llms/promptlayer_openai.py +++ b/libs/langchain/langchain/llms/promptlayer_openai.py @@ -1,7 +1,7 @@ import datetime from typing import Any, List, Optional -from langchain_core.schema import LLMResult +from langchain_core.outputs import LLMResult from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/llms/replicate.py b/libs/langchain/langchain/llms/replicate.py index 34e60851ed741..17dc7b42d2435 100644 --- a/libs/langchain/langchain/llms/replicate.py +++ b/libs/langchain/langchain/llms/replicate.py @@ -3,8 +3,8 @@ import logging from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional +from langchain_core.outputs import GenerationChunk from langchain_core.pydantic_v1 import Extra, Field, root_validator -from langchain_core.schema.output import GenerationChunk from langchain.callbacks.manager import CallbackManagerForLLMRun from langchain.llms.base import LLM diff --git a/libs/langchain/langchain/llms/textgen.py b/libs/langchain/langchain/llms/textgen.py index 93b387c85b8fe..37c68cf9f2b93 100644 --- a/libs/langchain/langchain/llms/textgen.py +++ b/libs/langchain/langchain/llms/textgen.py @@ -3,8 +3,8 @@ from typing import Any, AsyncIterator, Dict, Iterator, List, Optional import requests +from langchain_core.outputs import GenerationChunk from langchain_core.pydantic_v1 import Field -from langchain_core.schema.output import GenerationChunk from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/llms/titan_takeoff.py b/libs/langchain/langchain/llms/titan_takeoff.py index af9a5097985cd..759e565291c6a 100644 --- a/libs/langchain/langchain/llms/titan_takeoff.py +++ b/libs/langchain/langchain/llms/titan_takeoff.py @@ -1,7 +1,7 @@ from typing import Any, Iterator, List, Mapping, Optional import requests -from langchain_core.schema.output import GenerationChunk +from langchain_core.outputs import GenerationChunk from requests.exceptions import ConnectionError from langchain.callbacks.manager import CallbackManagerForLLMRun diff --git a/libs/langchain/langchain/llms/titan_takeoff_pro.py b/libs/langchain/langchain/llms/titan_takeoff_pro.py index cd3cc7bd29e82..05de21e94a420 100644 --- a/libs/langchain/langchain/llms/titan_takeoff_pro.py +++ 
b/libs/langchain/langchain/llms/titan_takeoff_pro.py @@ -1,7 +1,7 @@ from typing import Any, Iterator, List, Mapping, Optional import requests -from langchain_core.schema.output import GenerationChunk +from langchain_core.outputs import GenerationChunk from requests.exceptions import ConnectionError from langchain.callbacks.manager import CallbackManagerForLLMRun diff --git a/libs/langchain/langchain/llms/tongyi.py b/libs/langchain/langchain/llms/tongyi.py index bd5e3df5e53f4..74d904c4453f4 100644 --- a/libs/langchain/langchain/llms/tongyi.py +++ b/libs/langchain/langchain/llms/tongyi.py @@ -3,8 +3,8 @@ import logging from typing import Any, Callable, Dict, List, Optional +from langchain_core.outputs import Generation, LLMResult from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema import Generation, LLMResult from requests.exceptions import HTTPError from tenacity import ( before_sleep_log, diff --git a/libs/langchain/langchain/llms/vertexai.py b/libs/langchain/langchain/llms/vertexai.py index 6539e7647e5d7..4a3c67b7818af 100644 --- a/libs/langchain/langchain/llms/vertexai.py +++ b/libs/langchain/langchain/llms/vertexai.py @@ -13,12 +13,8 @@ Union, ) +from langchain_core.outputs import Generation, GenerationChunk, LLMResult from langchain_core.pydantic_v1 import BaseModel, Field, root_validator -from langchain_core.schema import ( - Generation, - LLMResult, -) -from langchain_core.schema.output import GenerationChunk from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/langchain/llms/vllm.py b/libs/langchain/langchain/llms/vllm.py index e7fced22c1333..4ccb5a743bee9 100644 --- a/libs/langchain/langchain/llms/vllm.py +++ b/libs/langchain/langchain/llms/vllm.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Optional +from langchain_core.outputs import Generation, LLMResult from langchain_core.pydantic_v1 import Field, root_validator -from langchain_core.schema.output import Generation, LLMResult from langchain.callbacks.manager import CallbackManagerForLLMRun from langchain.llms.base import BaseLLM diff --git a/libs/langchain/langchain/memory/buffer.py b/libs/langchain/langchain/memory/buffer.py index f4c6d356d6c62..c8b819fb7543d 100644 --- a/libs/langchain/langchain/memory/buffer.py +++ b/libs/langchain/langchain/memory/buffer.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Optional +from langchain_core.messages import BaseMessage, get_buffer_string from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema.messages import BaseMessage, get_buffer_string from langchain.memory.chat_memory import BaseChatMemory, BaseMemory from langchain.memory.utils import get_prompt_input_key diff --git a/libs/langchain/langchain/memory/buffer_window.py b/libs/langchain/langchain/memory/buffer_window.py index 50ddbad655abf..7aa9fa59c7291 100644 --- a/libs/langchain/langchain/memory/buffer_window.py +++ b/libs/langchain/langchain/memory/buffer_window.py @@ -1,6 +1,6 @@ from typing import Any, Dict, List, Union -from langchain_core.schema.messages import BaseMessage, get_buffer_string +from langchain_core.messages import BaseMessage, get_buffer_string from langchain.memory.chat_memory import BaseChatMemory diff --git a/libs/langchain/langchain/memory/chat_memory.py b/libs/langchain/langchain/memory/chat_memory.py index 882fc1a4e2788..d6afb4079e6ae 100644 --- a/libs/langchain/langchain/memory/chat_memory.py +++ b/libs/langchain/langchain/memory/chat_memory.py @@ -1,8 +1,9 @@ from abc import ABC 
from typing import Any, Dict, Optional, Tuple +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.memory import BaseMemory from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BaseChatMessageHistory, BaseMemory from langchain.memory.chat_message_histories.in_memory import ChatMessageHistory from langchain.memory.utils import get_prompt_input_key diff --git a/libs/langchain/langchain/memory/chat_message_histories/cassandra.py b/libs/langchain/langchain/memory/chat_message_histories/cassandra.py index 73baa95071441..bc3e0794652b3 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/cassandra.py +++ b/libs/langchain/langchain/memory/chat_message_histories/cassandra.py @@ -8,12 +8,10 @@ if typing.TYPE_CHECKING: from cassandra.cluster import Session -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, - _message_to_dict, + message_to_dict, messages_from_dict, ) @@ -66,7 +64,7 @@ def messages(self) -> List[BaseMessage]: # type: ignore def add_message(self, message: BaseMessage) -> None: """Write a message to the table""" self.blob_history.store( - self.session_id, json.dumps(_message_to_dict(message)), self.ttl_seconds + self.session_id, json.dumps(message_to_dict(message)), self.ttl_seconds ) def clear(self) -> None: diff --git a/libs/langchain/langchain/memory/chat_message_histories/cosmos_db.py b/libs/langchain/langchain/memory/chat_message_histories/cosmos_db.py index e01a2343880fe..4210d7a707623 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/cosmos_db.py +++ b/libs/langchain/langchain/memory/chat_message_histories/cosmos_db.py @@ -5,10 +5,8 @@ from types import TracebackType from typing import TYPE_CHECKING, Any, List, Optional, Type -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, messages_from_dict, messages_to_dict, diff --git a/libs/langchain/langchain/memory/chat_message_histories/dynamodb.py b/libs/langchain/langchain/memory/chat_message_histories/dynamodb.py index ff4d8960811b7..a804e75018bc7 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/dynamodb.py +++ b/libs/langchain/langchain/memory/chat_message_histories/dynamodb.py @@ -3,12 +3,10 @@ import logging from typing import TYPE_CHECKING, Dict, List, Optional -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, - _message_to_dict, + message_to_dict, messages_from_dict, messages_to_dict, ) @@ -132,7 +130,7 @@ def add_message(self, message: BaseMessage) -> None: ) from e messages = messages_to_dict(self.messages) - _message = _message_to_dict(message) + _message = message_to_dict(message) messages.append(_message) try: diff --git a/libs/langchain/langchain/memory/chat_message_histories/elasticsearch.py b/libs/langchain/langchain/memory/chat_message_histories/elasticsearch.py index 46c49730537db..fc889dd37c135 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/elasticsearch.py +++ b/libs/langchain/langchain/memory/chat_message_histories/elasticsearch.py @@ -3,10 +3,10 @@ from time import 
time from typing import TYPE_CHECKING, Any, Dict, List, Optional -from langchain_core.schema import BaseChatMessageHistory -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, - _message_to_dict, + message_to_dict, messages_from_dict, ) @@ -172,7 +172,7 @@ def add_message(self, message: BaseMessage) -> None: document={ "session_id": self.session_id, "created_at": round(time() * 1000), - "history": json.dumps(_message_to_dict(message)), + "history": json.dumps(message_to_dict(message)), }, refresh=True, ) diff --git a/libs/langchain/langchain/memory/chat_message_histories/file.py b/libs/langchain/langchain/memory/chat_message_histories/file.py index b9d2943cf099a..d6f2f43c3d652 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/file.py +++ b/libs/langchain/langchain/memory/chat_message_histories/file.py @@ -3,10 +3,8 @@ from pathlib import Path from typing import List -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, messages_from_dict, messages_to_dict, diff --git a/libs/langchain/langchain/memory/chat_message_histories/firestore.py b/libs/langchain/langchain/memory/chat_message_histories/firestore.py index d8aae0becc0f5..941bbb72f85c7 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/firestore.py +++ b/libs/langchain/langchain/memory/chat_message_histories/firestore.py @@ -4,10 +4,8 @@ import logging from typing import TYPE_CHECKING, List, Optional -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, messages_from_dict, messages_to_dict, diff --git a/libs/langchain/langchain/memory/chat_message_histories/in_memory.py b/libs/langchain/langchain/memory/chat_message_histories/in_memory.py index 3dc5142e461c1..8c76e850dd917 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/in_memory.py +++ b/libs/langchain/langchain/memory/chat_message_histories/in_memory.py @@ -1,10 +1,8 @@ from typing import List +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import BaseMessage from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import BaseMessage class ChatMessageHistory(BaseChatMessageHistory, BaseModel): diff --git a/libs/langchain/langchain/memory/chat_message_histories/momento.py b/libs/langchain/langchain/memory/chat_message_histories/momento.py index c2d70b88b6ad5..c8be875dfffc5 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/momento.py +++ b/libs/langchain/langchain/memory/chat_message_histories/momento.py @@ -4,12 +4,10 @@ from datetime import timedelta from typing import TYPE_CHECKING, Any, Optional -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, - _message_to_dict, + message_to_dict, messages_from_dict, ) @@ -163,7 +161,7 @@ def add_message(self, message: BaseMessage) -> None: """ from momento.responses import CacheListPushBack - item = 
json.dumps(_message_to_dict(message)) + item = json.dumps(message_to_dict(message)) push_response = self.cache_client.list_push_back( self.cache_name, self.key, item, ttl=self.ttl ) diff --git a/libs/langchain/langchain/memory/chat_message_histories/mongodb.py b/libs/langchain/langchain/memory/chat_message_histories/mongodb.py index 20c8bfb5cf0cd..5865f86b13c00 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/mongodb.py +++ b/libs/langchain/langchain/memory/chat_message_histories/mongodb.py @@ -2,12 +2,10 @@ import logging from typing import List -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, - _message_to_dict, + message_to_dict, messages_from_dict, ) @@ -77,7 +75,7 @@ def add_message(self, message: BaseMessage) -> None: self.collection.insert_one( { "SessionId": self.session_id, - "History": json.dumps(_message_to_dict(message)), + "History": json.dumps(message_to_dict(message)), } ) except errors.WriteError as err: diff --git a/libs/langchain/langchain/memory/chat_message_histories/neo4j.py b/libs/langchain/langchain/memory/chat_message_histories/neo4j.py index b198ab05b2467..d2795538b63f0 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/neo4j.py +++ b/libs/langchain/langchain/memory/chat_message_histories/neo4j.py @@ -1,7 +1,7 @@ from typing import List, Optional, Union -from langchain_core.schema import BaseChatMessageHistory -from langchain_core.schema.messages import BaseMessage, messages_from_dict +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import BaseMessage, messages_from_dict from langchain.utils import get_from_env diff --git a/libs/langchain/langchain/memory/chat_message_histories/postgres.py b/libs/langchain/langchain/memory/chat_message_histories/postgres.py index 19857662293e1..63794197e8f01 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/postgres.py +++ b/libs/langchain/langchain/memory/chat_message_histories/postgres.py @@ -2,12 +2,10 @@ import logging from typing import List -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, - _message_to_dict, + message_to_dict, messages_from_dict, ) @@ -67,7 +65,7 @@ def add_message(self, message: BaseMessage) -> None: sql.Identifier(self.table_name) ) self.cursor.execute( - query, (self.session_id, json.dumps(_message_to_dict(message))) + query, (self.session_id, json.dumps(message_to_dict(message))) ) self.connection.commit() diff --git a/libs/langchain/langchain/memory/chat_message_histories/redis.py b/libs/langchain/langchain/memory/chat_message_histories/redis.py index 6939d2d7af1c2..18a9b71e3c240 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/redis.py +++ b/libs/langchain/langchain/memory/chat_message_histories/redis.py @@ -2,12 +2,10 @@ import logging from typing import List, Optional -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, - _message_to_dict, + message_to_dict, messages_from_dict, ) @@ -58,7 +56,7 @@ def messages(self) -> List[BaseMessage]: # type: ignore def 
add_message(self, message: BaseMessage) -> None: """Append the message to the record in Redis""" - self.redis_client.lpush(self.key, json.dumps(_message_to_dict(message))) + self.redis_client.lpush(self.key, json.dumps(message_to_dict(message))) if self.ttl: self.redis_client.expire(self.key, self.ttl) diff --git a/libs/langchain/langchain/memory/chat_message_histories/rocksetdb.py b/libs/langchain/langchain/memory/chat_message_histories/rocksetdb.py index c995cf7338b83..2cc1e9ad77ff4 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/rocksetdb.py +++ b/libs/langchain/langchain/memory/chat_message_histories/rocksetdb.py @@ -3,10 +3,10 @@ from typing import Any, Callable, List, Union from uuid import uuid4 -from langchain_core.schema import BaseChatMessageHistory -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, - _message_to_dict, + message_to_dict, messages_from_dict, ) @@ -249,7 +249,7 @@ def add_message(self, message: BaseMessage) -> None: self.rockset.model.patch_operation.PatchOperation( op="ADD", path=f"/{self.messages_key}/-", - value=_message_to_dict(message), + value=message_to_dict(message), ) ], ) diff --git a/libs/langchain/langchain/memory/chat_message_histories/singlestoredb.py b/libs/langchain/langchain/memory/chat_message_histories/singlestoredb.py index 7d1c92745832c..a1fcbc64f3d4d 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/singlestoredb.py +++ b/libs/langchain/langchain/memory/chat_message_histories/singlestoredb.py @@ -6,12 +6,10 @@ List, ) -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, - _message_to_dict, + message_to_dict, messages_from_dict, ) @@ -252,7 +250,7 @@ def add_message(self, message: BaseMessage) -> None: self.session_id_field, self.message_field, ), - (self.session_id, json.dumps(_message_to_dict(message))), + (self.session_id, json.dumps(message_to_dict(message))), ) finally: cur.close() diff --git a/libs/langchain/langchain/memory/chat_message_histories/sql.py b/libs/langchain/langchain/memory/chat_message_histories/sql.py index e83ca7a971e50..fcc3ac71ab1a6 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/sql.py +++ b/libs/langchain/langchain/memory/chat_message_histories/sql.py @@ -9,12 +9,10 @@ from sqlalchemy.orm import declarative_base except ImportError: from sqlalchemy.ext.declarative import declarative_base -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, - _message_to_dict, + message_to_dict, messages_from_dict, ) from sqlalchemy.orm import sessionmaker @@ -75,7 +73,7 @@ def from_sql_model(self, sql_message: Any) -> BaseMessage: def to_sql_model(self, message: BaseMessage, session_id: str) -> Any: return self.model_class( - session_id=session_id, message=json.dumps(_message_to_dict(message)) + session_id=session_id, message=json.dumps(message_to_dict(message)) ) def get_sql_model_class(self) -> Any: diff --git a/libs/langchain/langchain/memory/chat_message_histories/streamlit.py b/libs/langchain/langchain/memory/chat_message_histories/streamlit.py index 111b86fc38a6c..51350d36024ae 100644 --- 
a/libs/langchain/langchain/memory/chat_message_histories/streamlit.py +++ b/libs/langchain/langchain/memory/chat_message_histories/streamlit.py @@ -1,9 +1,7 @@ from typing import List -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import BaseMessage +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import BaseMessage class StreamlitChatMessageHistory(BaseChatMessageHistory): diff --git a/libs/langchain/langchain/memory/chat_message_histories/upstash_redis.py b/libs/langchain/langchain/memory/chat_message_histories/upstash_redis.py index 94c83d1f68421..de1e7c3782275 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/upstash_redis.py +++ b/libs/langchain/langchain/memory/chat_message_histories/upstash_redis.py @@ -2,12 +2,10 @@ import logging from typing import List, Optional -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, - _message_to_dict, + message_to_dict, messages_from_dict, ) @@ -62,7 +60,7 @@ def messages(self) -> List[BaseMessage]: # type: ignore def add_message(self, message: BaseMessage) -> None: """Append the message to the record in Upstash Redis""" - self.redis_client.lpush(self.key, json.dumps(_message_to_dict(message))) + self.redis_client.lpush(self.key, json.dumps(message_to_dict(message))) if self.ttl: self.redis_client.expire(self.key, self.ttl) diff --git a/libs/langchain/langchain/memory/chat_message_histories/xata.py b/libs/langchain/langchain/memory/chat_message_histories/xata.py index e9e98af525619..56bcf1d98f393 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/xata.py +++ b/libs/langchain/langchain/memory/chat_message_histories/xata.py @@ -1,12 +1,10 @@ import json from typing import List -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( BaseMessage, - _message_to_dict, + message_to_dict, messages_from_dict, ) @@ -69,7 +67,7 @@ def _create_table_if_not_exists(self) -> None: def add_message(self, message: BaseMessage) -> None: """Append the message to the Xata table""" - msg = _message_to_dict(message) + msg = message_to_dict(message) r = self._client.records().insert( self._table_name, { diff --git a/libs/langchain/langchain/memory/chat_message_histories/zep.py b/libs/langchain/langchain/memory/chat_message_histories/zep.py index a9709d2218cba..3899f89ba6e08 100644 --- a/libs/langchain/langchain/memory/chat_message_histories/zep.py +++ b/libs/langchain/langchain/memory/chat_message_histories/zep.py @@ -3,10 +3,8 @@ import logging from typing import TYPE_CHECKING, Any, Dict, List, Optional -from langchain_core.schema import ( - BaseChatMessageHistory, -) -from langchain_core.schema.messages import ( +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.messages import ( AIMessage, BaseMessage, HumanMessage, diff --git a/libs/langchain/langchain/memory/combined.py b/libs/langchain/langchain/memory/combined.py index f67064e72757a..5ab0048895bba 100644 --- a/libs/langchain/langchain/memory/combined.py +++ b/libs/langchain/langchain/memory/combined.py @@ -1,8 +1,8 @@ import warnings from typing import Any, Dict, List, Set +from langchain_core.memory import BaseMemory 
from langchain_core.pydantic_v1 import validator -from langchain_core.schema import BaseMemory from langchain.memory.chat_memory import BaseChatMemory diff --git a/libs/langchain/langchain/memory/entity.py b/libs/langchain/langchain/memory/entity.py index fe7576573e614..db265cc5c2001 100644 --- a/libs/langchain/langchain/memory/entity.py +++ b/libs/langchain/langchain/memory/entity.py @@ -3,10 +3,10 @@ from itertools import islice from typing import Any, Dict, Iterable, List, Optional +from langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import BaseMessage, get_buffer_string +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import BaseMessage, get_buffer_string from langchain.chains.llm import LLMChain from langchain.memory.chat_memory import BaseChatMemory diff --git a/libs/langchain/langchain/memory/kg.py b/libs/langchain/langchain/memory/kg.py index 831f649b606ec..39b83c67ed4c1 100644 --- a/libs/langchain/langchain/memory/kg.py +++ b/libs/langchain/langchain/memory/kg.py @@ -1,9 +1,9 @@ from typing import Any, Dict, List, Type, Union +from langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import BaseMessage, SystemMessage, get_buffer_string +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BasePromptTemplate -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import BaseMessage, SystemMessage, get_buffer_string from langchain.chains.llm import LLMChain from langchain.graphs import NetworkxEntityGraph diff --git a/libs/langchain/langchain/memory/motorhead_memory.py b/libs/langchain/langchain/memory/motorhead_memory.py index a5607c1ba051e..363a0f77a2853 100644 --- a/libs/langchain/langchain/memory/motorhead_memory.py +++ b/libs/langchain/langchain/memory/motorhead_memory.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Optional import requests -from langchain_core.schema.messages import get_buffer_string +from langchain_core.messages import get_buffer_string from langchain.memory.chat_memory import BaseChatMemory diff --git a/libs/langchain/langchain/memory/readonly.py b/libs/langchain/langchain/memory/readonly.py index c037a90b51ae9..a564edb800267 100644 --- a/libs/langchain/langchain/memory/readonly.py +++ b/libs/langchain/langchain/memory/readonly.py @@ -1,6 +1,6 @@ from typing import Any, Dict, List -from langchain_core.schema import BaseMemory +from langchain_core.memory import BaseMemory class ReadOnlySharedMemory(BaseMemory): diff --git a/libs/langchain/langchain/memory/simple.py b/libs/langchain/langchain/memory/simple.py index 03fb4416743cf..7f2dfb5c14e7d 100644 --- a/libs/langchain/langchain/memory/simple.py +++ b/libs/langchain/langchain/memory/simple.py @@ -1,6 +1,6 @@ from typing import Any, Dict, List -from langchain_core.schema import BaseMemory +from langchain_core.memory import BaseMemory class SimpleMemory(BaseMemory): diff --git a/libs/langchain/langchain/memory/summary.py b/libs/langchain/langchain/memory/summary.py index 1f78ac24daf9c..b7cb996cb533f 100644 --- a/libs/langchain/langchain/memory/summary.py +++ b/libs/langchain/langchain/memory/summary.py @@ -2,13 +2,11 @@ from typing import Any, Dict, List, Type +from 
langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import BaseMessage, SystemMessage, get_buffer_string +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema import ( - BaseChatMessageHistory, - BasePromptTemplate, -) -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import BaseMessage, SystemMessage, get_buffer_string from langchain.chains.llm import LLMChain from langchain.memory.chat_memory import BaseChatMemory diff --git a/libs/langchain/langchain/memory/summary_buffer.py b/libs/langchain/langchain/memory/summary_buffer.py index d9c7bf29d70fd..23d050f62dd3d 100644 --- a/libs/langchain/langchain/memory/summary_buffer.py +++ b/libs/langchain/langchain/memory/summary_buffer.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List +from langchain_core.messages import BaseMessage, get_buffer_string from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema.messages import BaseMessage, get_buffer_string from langchain.memory.chat_memory import BaseChatMemory from langchain.memory.summary import SummarizerMixin diff --git a/libs/langchain/langchain/memory/token_buffer.py b/libs/langchain/langchain/memory/token_buffer.py index 57cf6fb5dfc58..853e0dcc6f6fc 100644 --- a/libs/langchain/langchain/memory/token_buffer.py +++ b/libs/langchain/langchain/memory/token_buffer.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import BaseMessage, get_buffer_string +from langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import BaseMessage, get_buffer_string from langchain.memory.chat_memory import BaseChatMemory diff --git a/libs/langchain/langchain/memory/vectorstore.py b/libs/langchain/langchain/memory/vectorstore.py index d76be477b1cae..3f4430ee7cf9a 100644 --- a/libs/langchain/langchain/memory/vectorstore.py +++ b/libs/langchain/langchain/memory/vectorstore.py @@ -2,9 +2,9 @@ from typing import Any, Dict, List, Optional, Sequence, Union +from langchain_core.documents import Document from langchain_core.pydantic_v1 import Field -from langchain_core.schema import Document -from langchain_core.schema.vectorstore import VectorStoreRetriever +from langchain_core.vectorstores import VectorStoreRetriever from langchain.memory.chat_memory import BaseMemory from langchain.memory.utils import get_prompt_input_key diff --git a/libs/langchain/langchain/output_parsers/boolean.py b/libs/langchain/langchain/output_parsers/boolean.py index 5d704ba94a41d..d670d155a3467 100644 --- a/libs/langchain/langchain/output_parsers/boolean.py +++ b/libs/langchain/langchain/output_parsers/boolean.py @@ -1,4 +1,4 @@ -from langchain_core.schema import BaseOutputParser +from langchain_core.output_parsers import BaseOutputParser class BooleanOutputParser(BaseOutputParser[bool]): diff --git a/libs/langchain/langchain/output_parsers/combining.py b/libs/langchain/langchain/output_parsers/combining.py index 300eec1c17507..0a9e11b791ce0 100644 --- a/libs/langchain/langchain/output_parsers/combining.py +++ b/libs/langchain/langchain/output_parsers/combining.py @@ -2,8 +2,8 @@ from typing import Any, Dict, List +from langchain_core.output_parsers import BaseOutputParser from langchain_core.pydantic_v1 import root_validator -from 
langchain_core.schema import BaseOutputParser class CombiningOutputParser(BaseOutputParser): diff --git a/libs/langchain/langchain/output_parsers/datetime.py b/libs/langchain/langchain/output_parsers/datetime.py index 5113bb07efcdd..41636fa698767 100644 --- a/libs/langchain/langchain/output_parsers/datetime.py +++ b/libs/langchain/langchain/output_parsers/datetime.py @@ -2,7 +2,8 @@ from datetime import datetime, timedelta from typing import List -from langchain_core.schema import BaseOutputParser, OutputParserException +from langchain_core.exceptions import OutputParserException +from langchain_core.output_parsers import BaseOutputParser from langchain.utils import comma_list diff --git a/libs/langchain/langchain/output_parsers/enum.py b/libs/langchain/langchain/output_parsers/enum.py index a6baadbfd36f0..4396a5291bf5a 100644 --- a/libs/langchain/langchain/output_parsers/enum.py +++ b/libs/langchain/langchain/output_parsers/enum.py @@ -1,8 +1,9 @@ from enum import Enum from typing import Any, Dict, List, Type +from langchain_core.exceptions import OutputParserException +from langchain_core.output_parsers import BaseOutputParser from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import BaseOutputParser, OutputParserException class EnumOutputParser(BaseOutputParser): diff --git a/libs/langchain/langchain/output_parsers/fix.py b/libs/langchain/langchain/output_parsers/fix.py index 37c186cc9abc3..036fd1ee0b7c0 100644 --- a/libs/langchain/langchain/output_parsers/fix.py +++ b/libs/langchain/langchain/output_parsers/fix.py @@ -2,12 +2,10 @@ from typing import Any, TypeVar -from langchain_core.schema import ( - BaseOutputParser, - BasePromptTemplate, - OutputParserException, -) -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.exceptions import OutputParserException +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser +from langchain_core.prompts import BasePromptTemplate from langchain.output_parsers.prompts import NAIVE_FIX_PROMPT diff --git a/libs/langchain/langchain/output_parsers/json.py b/libs/langchain/langchain/output_parsers/json.py index e4282c5975bec..dacf92f2d37a5 100644 --- a/libs/langchain/langchain/output_parsers/json.py +++ b/libs/langchain/langchain/output_parsers/json.py @@ -6,10 +6,8 @@ from typing import Any, Callable, List, Optional import jsonpatch -from langchain_core.schema.output_parser import ( - BaseCumulativeTransformOutputParser, - OutputParserException, -) +from langchain_core.exceptions import OutputParserException +from langchain_core.output_parsers import BaseCumulativeTransformOutputParser def _replace_new_line(match: re.Match[str]) -> str: diff --git a/libs/langchain/langchain/output_parsers/openai_functions.py b/libs/langchain/langchain/output_parsers/openai_functions.py index 8a5259ac0b893..1b94bd0b35566 100644 --- a/libs/langchain/langchain/output_parsers/openai_functions.py +++ b/libs/langchain/langchain/output_parsers/openai_functions.py @@ -3,16 +3,13 @@ from typing import Any, Dict, List, Optional, Type, Union import jsonpatch -from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema import ( - ChatGeneration, - Generation, - OutputParserException, -) -from langchain_core.schema.output_parser import ( +from langchain_core.exceptions import OutputParserException +from langchain_core.output_parsers import ( BaseCumulativeTransformOutputParser, BaseGenerationOutputParser, ) +from 
langchain_core.outputs import ChatGeneration, Generation +from langchain_core.pydantic_v1 import BaseModel, root_validator from langchain.output_parsers.json import parse_partial_json diff --git a/libs/langchain/langchain/output_parsers/openai_tools.py b/libs/langchain/langchain/output_parsers/openai_tools.py index 5254718263962..3c8127c8fb123 100644 --- a/libs/langchain/langchain/output_parsers/openai_tools.py +++ b/libs/langchain/langchain/output_parsers/openai_tools.py @@ -2,15 +2,12 @@ import json from typing import Any, List, Type -from langchain_core.pydantic_v1 import BaseModel -from langchain_core.schema import ( - ChatGeneration, - Generation, - OutputParserException, -) -from langchain_core.schema.output_parser import ( +from langchain_core.exceptions import OutputParserException +from langchain_core.output_parsers import ( BaseGenerationOutputParser, ) +from langchain_core.outputs import ChatGeneration, Generation +from langchain_core.pydantic_v1 import BaseModel class JsonOutputToolsParser(BaseGenerationOutputParser[Any]): diff --git a/libs/langchain/langchain/output_parsers/pydantic.py b/libs/langchain/langchain/output_parsers/pydantic.py index 80fb5e926c3c7..b4fe96b5644f3 100644 --- a/libs/langchain/langchain/output_parsers/pydantic.py +++ b/libs/langchain/langchain/output_parsers/pydantic.py @@ -2,8 +2,9 @@ import re from typing import Type, TypeVar +from langchain_core.exceptions import OutputParserException +from langchain_core.output_parsers import BaseOutputParser from langchain_core.pydantic_v1 import BaseModel, ValidationError -from langchain_core.schema import BaseOutputParser, OutputParserException from langchain.output_parsers.format_instructions import PYDANTIC_FORMAT_INSTRUCTIONS diff --git a/libs/langchain/langchain/output_parsers/rail_parser.py b/libs/langchain/langchain/output_parsers/rail_parser.py index 64077cf09661d..2f6c7b7f0a75b 100644 --- a/libs/langchain/langchain/output_parsers/rail_parser.py +++ b/libs/langchain/langchain/output_parsers/rail_parser.py @@ -2,7 +2,7 @@ from typing import Any, Callable, Dict, Optional -from langchain_core.schema import BaseOutputParser +from langchain_core.output_parsers import BaseOutputParser class GuardrailsOutputParser(BaseOutputParser): diff --git a/libs/langchain/langchain/output_parsers/regex.py b/libs/langchain/langchain/output_parsers/regex.py index cc66b95a51fd8..ea8b053e1598c 100644 --- a/libs/langchain/langchain/output_parsers/regex.py +++ b/libs/langchain/langchain/output_parsers/regex.py @@ -3,7 +3,7 @@ import re from typing import Dict, List, Optional -from langchain_core.schema import BaseOutputParser +from langchain_core.output_parsers import BaseOutputParser class RegexParser(BaseOutputParser): diff --git a/libs/langchain/langchain/output_parsers/regex_dict.py b/libs/langchain/langchain/output_parsers/regex_dict.py index 9cb31db3665d8..1b390485da3f3 100644 --- a/libs/langchain/langchain/output_parsers/regex_dict.py +++ b/libs/langchain/langchain/output_parsers/regex_dict.py @@ -3,7 +3,7 @@ import re from typing import Dict, Optional -from langchain_core.schema import BaseOutputParser +from langchain_core.output_parsers import BaseOutputParser class RegexDictParser(BaseOutputParser): diff --git a/libs/langchain/langchain/output_parsers/retry.py b/libs/langchain/langchain/output_parsers/retry.py index e5ec6d29a7a5c..25b3a91695593 100644 --- a/libs/langchain/langchain/output_parsers/retry.py +++ b/libs/langchain/langchain/output_parsers/retry.py @@ -2,14 +2,11 @@ from typing import Any, TypeVar +from 
langchain_core.exceptions import OutputParserException +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser +from langchain_core.prompts import BasePromptTemplate, PromptValue from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.schema import ( - BaseOutputParser, - BasePromptTemplate, - OutputParserException, - PromptValue, -) -from langchain_core.schema.language_model import BaseLanguageModel NAIVE_COMPLETION_RETRY = """Prompt: {prompt} diff --git a/libs/langchain/langchain/output_parsers/structured.py b/libs/langchain/langchain/output_parsers/structured.py index 24f6177a14db7..1d1b61128fc32 100644 --- a/libs/langchain/langchain/output_parsers/structured.py +++ b/libs/langchain/langchain/output_parsers/structured.py @@ -2,8 +2,8 @@ from typing import Any, List +from langchain_core.output_parsers import BaseOutputParser from langchain_core.pydantic_v1 import BaseModel -from langchain_core.schema import BaseOutputParser from langchain.output_parsers.format_instructions import ( STRUCTURED_FORMAT_INSTRUCTIONS, diff --git a/libs/langchain/langchain/output_parsers/xml.py b/libs/langchain/langchain/output_parsers/xml.py index 94361f30f9b40..794d68c6b9eba 100644 --- a/libs/langchain/langchain/output_parsers/xml.py +++ b/libs/langchain/langchain/output_parsers/xml.py @@ -2,7 +2,7 @@ import xml.etree.ElementTree as ET from typing import Any, Dict, List, Optional -from langchain_core.schema import BaseOutputParser +from langchain_core.output_parsers import BaseOutputParser from langchain.output_parsers.format_instructions import XML_FORMAT_INSTRUCTIONS diff --git a/libs/langchain/langchain/prompts/__init__.py b/libs/langchain/langchain/prompts/__init__.py index 18484ac266e11..d8ff6a9ad99cd 100644 --- a/libs/langchain/langchain/prompts/__init__.py +++ b/libs/langchain/langchain/prompts/__init__.py @@ -27,32 +27,31 @@ ChatPromptValue """ # noqa: E501 -from langchain_core.prompts.base import StringPromptTemplate -from langchain_core.prompts.chat import ( +from langchain_core.example_selectors import ( + LengthBasedExampleSelector, + MaxMarginalRelevanceExampleSelector, + SemanticSimilarityExampleSelector, +) +from langchain_core.prompts import ( AIMessagePromptTemplate, BaseChatPromptTemplate, + BasePromptTemplate, ChatMessagePromptTemplate, ChatPromptTemplate, + FewShotChatMessagePromptTemplate, + FewShotPromptTemplate, + FewShotPromptWithTemplates, HumanMessagePromptTemplate, MessagesPlaceholder, + PipelinePromptTemplate, + Prompt, + PromptTemplate, + StringPromptTemplate, SystemMessagePromptTemplate, + load_prompt, ) -from langchain_core.prompts.few_shot import ( - FewShotChatMessagePromptTemplate, - FewShotPromptTemplate, -) -from langchain_core.prompts.few_shot_with_templates import FewShotPromptWithTemplates -from langchain_core.prompts.loading import load_prompt -from langchain_core.prompts.pipeline import PipelinePromptTemplate -from langchain_core.prompts.prompt import Prompt, PromptTemplate -from langchain_core.schema.prompt_template import BasePromptTemplate -from langchain.prompts.example_selector import ( - LengthBasedExampleSelector, - MaxMarginalRelevanceExampleSelector, - NGramOverlapExampleSelector, - SemanticSimilarityExampleSelector, -) +from langchain.prompts.example_selector import NGramOverlapExampleSelector __all__ = [ "AIMessagePromptTemplate", diff --git a/libs/langchain/langchain/prompts/base.py b/libs/langchain/langchain/prompts/base.py index a266d5e4c0b6b..a535e905a6a61 100644 --- 
a/libs/langchain/langchain/prompts/base.py +++ b/libs/langchain/langchain/prompts/base.py @@ -1,3 +1,4 @@ +from langchain_core.prompts import BasePromptTemplate from langchain_core.prompts.base import ( StringPromptTemplate, StringPromptValue, @@ -6,7 +7,6 @@ jinja2_formatter, validate_jinja2, ) -from langchain_core.schema.prompt_template import BasePromptTemplate __all__ = [ "jinja2_formatter", diff --git a/libs/langchain/langchain/prompts/example_selector/__init__.py b/libs/langchain/langchain/prompts/example_selector/__init__.py index 7cb71659c6276..670eef9c4c9e8 100644 --- a/libs/langchain/langchain/prompts/example_selector/__init__.py +++ b/libs/langchain/langchain/prompts/example_selector/__init__.py @@ -1,8 +1,8 @@ """Logic for selecting examples to include in prompts.""" -from langchain_core.prompts.example_selector.length_based import ( +from langchain_core.example_selectors.length_based import ( LengthBasedExampleSelector, ) -from langchain_core.prompts.example_selector.semantic_similarity import ( +from langchain_core.example_selectors.semantic_similarity import ( MaxMarginalRelevanceExampleSelector, SemanticSimilarityExampleSelector, ) diff --git a/libs/langchain/langchain/prompts/example_selector/base.py b/libs/langchain/langchain/prompts/example_selector/base.py index 3649ca63e6782..d6c9e074bdb57 100644 --- a/libs/langchain/langchain/prompts/example_selector/base.py +++ b/libs/langchain/langchain/prompts/example_selector/base.py @@ -1,3 +1,3 @@ -from langchain_core.prompts.example_selector.base import BaseExampleSelector +from langchain_core.example_selectors.base import BaseExampleSelector __all__ = ["BaseExampleSelector"] diff --git a/libs/langchain/langchain/prompts/example_selector/length_based.py b/libs/langchain/langchain/prompts/example_selector/length_based.py index e9edb8fcff434..1bb38d8a0c153 100644 --- a/libs/langchain/langchain/prompts/example_selector/length_based.py +++ b/libs/langchain/langchain/prompts/example_selector/length_based.py @@ -1,4 +1,4 @@ -from langchain_core.prompts.example_selector.length_based import ( +from langchain_core.example_selectors.length_based import ( LengthBasedExampleSelector, ) diff --git a/libs/langchain/langchain/prompts/example_selector/ngram_overlap.py b/libs/langchain/langchain/prompts/example_selector/ngram_overlap.py index 21bfa4c411d02..c439c946ed30d 100644 --- a/libs/langchain/langchain/prompts/example_selector/ngram_overlap.py +++ b/libs/langchain/langchain/prompts/example_selector/ngram_overlap.py @@ -6,8 +6,8 @@ from typing import Dict, List import numpy as np -from langchain_core.prompts.example_selector.base import BaseExampleSelector -from langchain_core.prompts.prompt import PromptTemplate +from langchain_core.example_selectors.base import BaseExampleSelector +from langchain_core.prompts import PromptTemplate from langchain_core.pydantic_v1 import BaseModel, root_validator diff --git a/libs/langchain/langchain/prompts/example_selector/semantic_similarity.py b/libs/langchain/langchain/prompts/example_selector/semantic_similarity.py index 2f730f1895f57..e921236d8784c 100644 --- a/libs/langchain/langchain/prompts/example_selector/semantic_similarity.py +++ b/libs/langchain/langchain/prompts/example_selector/semantic_similarity.py @@ -1,4 +1,4 @@ -from langchain_core.prompts.example_selector.semantic_similarity import ( +from langchain_core.example_selectors.semantic_similarity import ( MaxMarginalRelevanceExampleSelector, SemanticSimilarityExampleSelector, sorted_values, diff --git 
a/libs/langchain/langchain/retrievers/arcee.py b/libs/langchain/langchain/retrievers/arcee.py index a3bd2883e3449..e6b168c8f9a43 100644 --- a/libs/langchain/langchain/retrievers/arcee.py +++ b/libs/langchain/langchain/retrievers/arcee.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Optional from langchain_core.pydantic_v1 import Extra, root_validator -from langchain_core.schema import BaseRetriever +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/retrievers/arxiv.py b/libs/langchain/langchain/retrievers/arxiv.py index 50880935621b4..951bc2ecd8bf8 100644 --- a/libs/langchain/langchain/retrievers/arxiv.py +++ b/libs/langchain/langchain/retrievers/arxiv.py @@ -1,6 +1,7 @@ from typing import List -from langchain_core.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.utilities.arxiv import ArxivAPIWrapper diff --git a/libs/langchain/langchain/retrievers/azure_cognitive_search.py b/libs/langchain/langchain/retrievers/azure_cognitive_search.py index 4a75291a66bb6..27cdd91815715 100644 --- a/libs/langchain/langchain/retrievers/azure_cognitive_search.py +++ b/libs/langchain/langchain/retrievers/azure_cognitive_search.py @@ -5,8 +5,9 @@ import aiohttp import requests +from langchain_core.documents import Document from langchain_core.pydantic_v1 import Extra, root_validator -from langchain_core.schema import BaseRetriever, Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/langchain/retrievers/bm25.py b/libs/langchain/langchain/retrievers/bm25.py index f1868e9b0e7b4..056b35d814ff1 100644 --- a/libs/langchain/langchain/retrievers/bm25.py +++ b/libs/langchain/langchain/retrievers/bm25.py @@ -2,7 +2,8 @@ from typing import Any, Callable, Dict, Iterable, List, Optional -from langchain_core.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun diff --git a/libs/langchain/langchain/retrievers/chaindesk.py b/libs/langchain/langchain/retrievers/chaindesk.py index 71bdae23286b1..c39d9af0a6d88 100644 --- a/libs/langchain/langchain/retrievers/chaindesk.py +++ b/libs/langchain/langchain/retrievers/chaindesk.py @@ -2,7 +2,8 @@ import aiohttp import requests -from langchain_core.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/langchain/retrievers/chatgpt_plugin_retriever.py b/libs/langchain/langchain/retrievers/chatgpt_plugin_retriever.py index e38c9f85da71b..2981af93bc3cd 100644 --- a/libs/langchain/langchain/retrievers/chatgpt_plugin_retriever.py +++ b/libs/langchain/langchain/retrievers/chatgpt_plugin_retriever.py @@ -4,7 +4,8 @@ import aiohttp import requests -from langchain_core.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git 
a/libs/langchain/langchain/retrievers/cohere_rag_retriever.py b/libs/langchain/langchain/retrievers/cohere_rag_retriever.py index 7c1fa4ac58e8f..48b4e2ea189c7 100644 --- a/libs/langchain/langchain/retrievers/cohere_rag_retriever.py +++ b/libs/langchain/langchain/retrievers/cohere_rag_retriever.py @@ -2,8 +2,10 @@ from typing import TYPE_CHECKING, Any, Dict, List +from langchain_core.documents import Document +from langchain_core.messages import HumanMessage from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BaseRetriever, Document, HumanMessage +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, @@ -12,7 +14,7 @@ from langchain.chat_models.base import BaseChatModel if TYPE_CHECKING: - from langchain_core.schema.messages import BaseMessage + from langchain_core.messages import BaseMessage def _get_docs(response: Any) -> List[Document]: diff --git a/libs/langchain/langchain/retrievers/contextual_compression.py b/libs/langchain/langchain/retrievers/contextual_compression.py index 1ef429ecddbce..d06a9b690838d 100644 --- a/libs/langchain/langchain/retrievers/contextual_compression.py +++ b/libs/langchain/langchain/retrievers/contextual_compression.py @@ -1,6 +1,7 @@ from typing import Any, List -from langchain_core.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/langchain/retrievers/databerry.py b/libs/langchain/langchain/retrievers/databerry.py index 6913a642891bd..c8eda6fb2ec6d 100644 --- a/libs/langchain/langchain/retrievers/databerry.py +++ b/libs/langchain/langchain/retrievers/databerry.py @@ -2,7 +2,8 @@ import aiohttp import requests -from langchain_core.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/langchain/retrievers/docarray.py b/libs/langchain/langchain/retrievers/docarray.py index 8c20498a14c93..ce9a6d6543592 100644 --- a/libs/langchain/langchain/retrievers/docarray.py +++ b/libs/langchain/langchain/retrievers/docarray.py @@ -2,8 +2,9 @@ from typing import Any, Dict, List, Optional, Union import numpy as np -from langchain_core.schema import BaseRetriever, Document -from langchain_core.schema.embeddings import Embeddings +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/retrievers/document_compressors/base.py b/libs/langchain/langchain/retrievers/document_compressors/base.py index 42874801965b3..799112766a4b4 100644 --- a/libs/langchain/langchain/retrievers/document_compressors/base.py +++ b/libs/langchain/langchain/retrievers/document_compressors/base.py @@ -3,8 +3,9 @@ from inspect import signature from typing import List, Optional, Sequence, Union +from langchain_core.document_transformers import BaseDocumentTransformer +from langchain_core.documents import Document from langchain_core.pydantic_v1 import BaseModel -from langchain_core.schema import BaseDocumentTransformer, Document from 
langchain.callbacks.manager import Callbacks diff --git a/libs/langchain/langchain/retrievers/document_compressors/chain_extract.py b/libs/langchain/langchain/retrievers/document_compressors/chain_extract.py index c6ff92f1471fd..0a04eb5322db2 100644 --- a/libs/langchain/langchain/retrievers/document_compressors/chain_extract.py +++ b/libs/langchain/langchain/retrievers/document_compressors/chain_extract.py @@ -4,9 +4,10 @@ import asyncio from typing import Any, Callable, Dict, Optional, Sequence +from langchain_core.documents import Document +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser from langchain_core.prompts import PromptTemplate -from langchain_core.schema import BaseOutputParser, Document -from langchain_core.schema.language_model import BaseLanguageModel from langchain.callbacks.manager import Callbacks from langchain.chains.llm import LLMChain diff --git a/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py b/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py index 5ce340480e0cd..5278065ee298b 100644 --- a/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py +++ b/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py @@ -1,9 +1,9 @@ """Filter that uses an LLM to drop documents that aren't relevant to the query.""" from typing import Any, Callable, Dict, Optional, Sequence -from langchain_core.prompts import PromptTemplate -from langchain_core.schema import BasePromptTemplate, Document -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.documents import Document +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate, PromptTemplate from langchain.callbacks.manager import Callbacks from langchain.chains import LLMChain diff --git a/libs/langchain/langchain/retrievers/document_compressors/cohere_rerank.py b/libs/langchain/langchain/retrievers/document_compressors/cohere_rerank.py index 313832534389d..21972e2f142aa 100644 --- a/libs/langchain/langchain/retrievers/document_compressors/cohere_rerank.py +++ b/libs/langchain/langchain/retrievers/document_compressors/cohere_rerank.py @@ -2,8 +2,8 @@ from typing import TYPE_CHECKING, Dict, Optional, Sequence +from langchain_core.documents import Document from langchain_core.pydantic_v1 import Extra, root_validator -from langchain_core.schema import Document from langchain.callbacks.manager import Callbacks from langchain.retrievers.document_compressors.base import BaseDocumentCompressor diff --git a/libs/langchain/langchain/retrievers/document_compressors/embeddings_filter.py b/libs/langchain/langchain/retrievers/document_compressors/embeddings_filter.py index 5b02bd48bb6ab..cc3e9d5ad3e10 100644 --- a/libs/langchain/langchain/retrievers/document_compressors/embeddings_filter.py +++ b/libs/langchain/langchain/retrievers/document_compressors/embeddings_filter.py @@ -1,9 +1,9 @@ from typing import Callable, Dict, Optional, Sequence import numpy as np +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import Document -from langchain_core.schema.embeddings import Embeddings from langchain.callbacks.manager import Callbacks from langchain.document_transformers.embeddings_redundant_filter import ( diff --git a/libs/langchain/langchain/retrievers/elastic_search_bm25.py 
b/libs/langchain/langchain/retrievers/elastic_search_bm25.py index cb69b8e3dd0f2..975b5f2aea024 100644 --- a/libs/langchain/langchain/retrievers/elastic_search_bm25.py +++ b/libs/langchain/langchain/retrievers/elastic_search_bm25.py @@ -5,7 +5,7 @@ import uuid from typing import Any, Iterable, List -from langchain_core.schema import BaseRetriever +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/retrievers/ensemble.py b/libs/langchain/langchain/retrievers/ensemble.py index df475d1dcaf49..de34d0c0b0f15 100644 --- a/libs/langchain/langchain/retrievers/ensemble.py +++ b/libs/langchain/langchain/retrievers/ensemble.py @@ -4,8 +4,9 @@ """ from typing import Any, Dict, List +from langchain_core.documents import Document from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import BaseRetriever, Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/langchain/retrievers/google_cloud_documentai_warehouse.py b/libs/langchain/langchain/retrievers/google_cloud_documentai_warehouse.py index e7f5e65cbbde7..6d205bd1e2221 100644 --- a/libs/langchain/langchain/retrievers/google_cloud_documentai_warehouse.py +++ b/libs/langchain/langchain/retrievers/google_cloud_documentai_warehouse.py @@ -2,7 +2,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import BaseRetriever +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/retrievers/google_vertex_ai_search.py b/libs/langchain/langchain/retrievers/google_vertex_ai_search.py index 786920b6625e2..7955b71deb44a 100644 --- a/libs/langchain/langchain/retrievers/google_vertex_ai_search.py +++ b/libs/langchain/langchain/retrievers/google_vertex_ai_search.py @@ -3,8 +3,9 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence +from langchain_core.documents import Document from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator -from langchain_core.schema import BaseRetriever, Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.utilities.vertexai import get_client_info diff --git a/libs/langchain/langchain/retrievers/kay.py b/libs/langchain/langchain/retrievers/kay.py index e3aac4dee3aa6..d57ebc709896a 100644 --- a/libs/langchain/langchain/retrievers/kay.py +++ b/libs/langchain/langchain/retrievers/kay.py @@ -2,7 +2,8 @@ from typing import Any, List -from langchain_core.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun diff --git a/libs/langchain/langchain/retrievers/kendra.py b/libs/langchain/langchain/retrievers/kendra.py index 7667fc7c31647..2256c7d914e2e 100644 --- a/libs/langchain/langchain/retrievers/kendra.py +++ b/libs/langchain/langchain/retrievers/kendra.py @@ -3,7 +3,7 @@ from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Union from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator, 
validator -from langchain_core.schema import BaseRetriever +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/retrievers/knn.py b/libs/langchain/langchain/retrievers/knn.py index e5ef90907ecdc..a2f2cc432b6d3 100644 --- a/libs/langchain/langchain/retrievers/knn.py +++ b/libs/langchain/langchain/retrievers/knn.py @@ -8,8 +8,9 @@ from typing import Any, List, Optional import numpy as np -from langchain_core.schema import BaseRetriever, Document -from langchain_core.schema.embeddings import Embeddings +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun diff --git a/libs/langchain/langchain/retrievers/llama_index.py b/libs/langchain/langchain/retrievers/llama_index.py index e602bf72fc942..901e0317ed5b4 100644 --- a/libs/langchain/langchain/retrievers/llama_index.py +++ b/libs/langchain/langchain/retrievers/llama_index.py @@ -1,7 +1,8 @@ from typing import Any, Dict, List, cast +from langchain_core.documents import Document from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BaseRetriever, Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun diff --git a/libs/langchain/langchain/retrievers/merger_retriever.py b/libs/langchain/langchain/retrievers/merger_retriever.py index 6f12050042fd7..ab3124a6a7b4c 100644 --- a/libs/langchain/langchain/retrievers/merger_retriever.py +++ b/libs/langchain/langchain/retrievers/merger_retriever.py @@ -1,7 +1,8 @@ import asyncio from typing import List -from langchain_core.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/langchain/retrievers/metal.py b/libs/langchain/langchain/retrievers/metal.py index f3039ddffc6f7..117137da1edaf 100644 --- a/libs/langchain/langchain/retrievers/metal.py +++ b/libs/langchain/langchain/retrievers/metal.py @@ -1,7 +1,8 @@ from typing import Any, List, Optional +from langchain_core.documents import Document from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import BaseRetriever, Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun diff --git a/libs/langchain/langchain/retrievers/milvus.py b/libs/langchain/langchain/retrievers/milvus.py index 121474aae7b64..feda37d5a73d8 100644 --- a/libs/langchain/langchain/retrievers/milvus.py +++ b/libs/langchain/langchain/retrievers/milvus.py @@ -2,9 +2,10 @@ import warnings from typing import Any, Dict, List, Optional +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import BaseRetriever, Document -from langchain_core.schema.embeddings import Embeddings +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.vectorstores.milvus import Milvus diff --git a/libs/langchain/langchain/retrievers/multi_query.py b/libs/langchain/langchain/retrievers/multi_query.py 
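# Illustrative aside, not a hunk from this PR: the retriever files in this
# stretch all converge on the same two imports, Document from
# langchain_core.documents and BaseRetriever from langchain_core.retrievers.
# A minimal custom retriever sketched against the new paths; the class name
# and query string are hypothetical, and the run-manager import follows the
# langchain.callbacks.manager path used by the retrievers in these hunks.
from typing import List

from langchain_core.documents import Document
from langchain_core.retrievers import BaseRetriever

from langchain.callbacks.manager import CallbackManagerForRetrieverRun


class StaticRetriever(BaseRetriever):
    """Toy retriever returning one fixed document, just to exercise the imports."""

    def _get_relevant_documents(
        self, query: str, *, run_manager: CallbackManagerForRetrieverRun
    ) -> List[Document]:
        return [Document(page_content=f"stub result for: {query}")]


docs = StaticRetriever().get_relevant_documents("hybrid search")
# End of aside.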
index 652ae4994940b..f30cc0266f86b 100644 --- a/libs/langchain/langchain/retrievers/multi_query.py +++ b/libs/langchain/langchain/retrievers/multi_query.py @@ -2,9 +2,10 @@ import logging from typing import List, Sequence +from langchain_core.documents import Document from langchain_core.prompts.prompt import PromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema import BaseRetriever, Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/langchain/retrievers/multi_vector.py b/libs/langchain/langchain/retrievers/multi_vector.py index fec3b950cc970..81038b1d980b3 100644 --- a/libs/langchain/langchain/retrievers/multi_vector.py +++ b/libs/langchain/langchain/retrievers/multi_vector.py @@ -1,8 +1,10 @@ from typing import List +from langchain_core.documents import Document from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BaseRetriever, BaseStore, Document -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.retrievers import BaseRetriever +from langchain_core.stores import BaseStore +from langchain_core.vectorstores import VectorStore from langchain.callbacks.manager import CallbackManagerForRetrieverRun diff --git a/libs/langchain/langchain/retrievers/parent_document_retriever.py b/libs/langchain/langchain/retrievers/parent_document_retriever.py index b0d3d3ce20d70..d26f4b9685b7f 100644 --- a/libs/langchain/langchain/retrievers/parent_document_retriever.py +++ b/libs/langchain/langchain/retrievers/parent_document_retriever.py @@ -1,7 +1,7 @@ import uuid from typing import List, Optional -from langchain_core.schema.document import Document +from langchain_core.documents import Document from langchain.retrievers import MultiVectorRetriever from langchain.text_splitter import TextSplitter diff --git a/libs/langchain/langchain/retrievers/pinecone_hybrid_search.py b/libs/langchain/langchain/retrievers/pinecone_hybrid_search.py index cfe73ed43105e..c1f520d717903 100644 --- a/libs/langchain/langchain/retrievers/pinecone_hybrid_search.py +++ b/libs/langchain/langchain/retrievers/pinecone_hybrid_search.py @@ -3,9 +3,10 @@ import hashlib from typing import Any, Dict, List, Optional +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import Extra, root_validator -from langchain_core.schema import BaseRetriever, Document -from langchain_core.schema.embeddings import Embeddings +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun diff --git a/libs/langchain/langchain/retrievers/pubmed.py b/libs/langchain/langchain/retrievers/pubmed.py index a3600d3b9fd6f..73ee6b0615059 100644 --- a/libs/langchain/langchain/retrievers/pubmed.py +++ b/libs/langchain/langchain/retrievers/pubmed.py @@ -1,6 +1,7 @@ from typing import List -from langchain_core.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.utilities.pubmed import PubMedAPIWrapper diff --git a/libs/langchain/langchain/retrievers/re_phraser.py b/libs/langchain/langchain/retrievers/re_phraser.py index 350866af4f50e..a986da9be1fe4 100644 --- a/libs/langchain/langchain/retrievers/re_phraser.py +++ 
b/libs/langchain/langchain/retrievers/re_phraser.py @@ -1,8 +1,9 @@ import logging from typing import List +from langchain_core.documents import Document from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.schema import BaseRetriever, Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/langchain/retrievers/remote_retriever.py b/libs/langchain/langchain/retrievers/remote_retriever.py index a5ebd8ef63c27..9a27dc569588b 100644 --- a/libs/langchain/langchain/retrievers/remote_retriever.py +++ b/libs/langchain/langchain/retrievers/remote_retriever.py @@ -2,7 +2,8 @@ import aiohttp import requests -from langchain_core.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/langchain/retrievers/self_query/base.py b/libs/langchain/langchain/retrievers/self_query/base.py index 7739dfbe72cc4..cb51639622d98 100644 --- a/libs/langchain/langchain/retrievers/self_query/base.py +++ b/libs/langchain/langchain/retrievers/self_query/base.py @@ -2,11 +2,12 @@ import logging from typing import Any, Dict, List, Optional, Sequence, Tuple, Type, Union +from langchain_core.documents import Document +from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import BaseModel, Field, root_validator +from langchain_core.retrievers import BaseRetriever from langchain_core.runnables import Runnable -from langchain_core.schema import BaseRetriever, Document -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/langchain/retrievers/svm.py b/libs/langchain/langchain/retrievers/svm.py index 99cff11fc181c..0372f36043679 100644 --- a/libs/langchain/langchain/retrievers/svm.py +++ b/libs/langchain/langchain/retrievers/svm.py @@ -4,8 +4,9 @@ from typing import Any, Iterable, List, Optional import numpy as np -from langchain_core.schema import BaseRetriever, Document -from langchain_core.schema.embeddings import Embeddings +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun diff --git a/libs/langchain/langchain/retrievers/tavily_search_api.py b/libs/langchain/langchain/retrievers/tavily_search_api.py index 5fdbc28eeae51..0b1a3c7d0d7cc 100644 --- a/libs/langchain/langchain/retrievers/tavily_search_api.py +++ b/libs/langchain/langchain/retrievers/tavily_search_api.py @@ -2,8 +2,8 @@ from enum import Enum from typing import Any, Dict, List, Optional -from langchain_core.schema import Document -from langchain_core.schema.retriever import BaseRetriever +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun diff --git a/libs/langchain/langchain/retrievers/tfidf.py b/libs/langchain/langchain/retrievers/tfidf.py index f81bc25f53ae6..9ac4d11d0b82e 100644 --- a/libs/langchain/langchain/retrievers/tfidf.py +++ b/libs/langchain/langchain/retrievers/tfidf.py @@ -4,7 +4,8 
@@ from pathlib import Path from typing import Any, Dict, Iterable, List, Optional -from langchain_core.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun diff --git a/libs/langchain/langchain/retrievers/time_weighted_retriever.py b/libs/langchain/langchain/retrievers/time_weighted_retriever.py index e3a901c9b6f1e..273839984d8b0 100644 --- a/libs/langchain/langchain/retrievers/time_weighted_retriever.py +++ b/libs/langchain/langchain/retrievers/time_weighted_retriever.py @@ -2,9 +2,10 @@ from copy import deepcopy from typing import Any, Dict, List, Optional, Tuple +from langchain_core.documents import Document from langchain_core.pydantic_v1 import Field -from langchain_core.schema import BaseRetriever, Document -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.retrievers import BaseRetriever +from langchain_core.vectorstores import VectorStore from langchain.callbacks.manager import CallbackManagerForRetrieverRun diff --git a/libs/langchain/langchain/retrievers/vespa_retriever.py b/libs/langchain/langchain/retrievers/vespa_retriever.py index 17b7e07e6bdcc..89d269ed4d26c 100644 --- a/libs/langchain/langchain/retrievers/vespa_retriever.py +++ b/libs/langchain/langchain/retrievers/vespa_retriever.py @@ -3,7 +3,8 @@ import json from typing import Any, Dict, List, Literal, Optional, Sequence, Union -from langchain_core.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun diff --git a/libs/langchain/langchain/retrievers/weaviate_hybrid_search.py b/libs/langchain/langchain/retrievers/weaviate_hybrid_search.py index b28a643e8f7b2..a00db19af7b45 100644 --- a/libs/langchain/langchain/retrievers/weaviate_hybrid_search.py +++ b/libs/langchain/langchain/retrievers/weaviate_hybrid_search.py @@ -4,7 +4,7 @@ from uuid import uuid4 from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import BaseRetriever +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/retrievers/web_research.py b/libs/langchain/langchain/retrievers/web_research.py index f1ebe46ef3fda..74bd7f370c84e 100644 --- a/libs/langchain/langchain/retrievers/web_research.py +++ b/libs/langchain/langchain/retrievers/web_research.py @@ -2,10 +2,11 @@ import re from typing import List, Optional +from langchain_core.documents import Document from langchain_core.prompts import BasePromptTemplate, PromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema import BaseRetriever, Document -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.retrievers import BaseRetriever +from langchain_core.vectorstores import VectorStore from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/langchain/retrievers/wikipedia.py b/libs/langchain/langchain/retrievers/wikipedia.py index fccbfa1508fbe..e40eb7682c470 100644 --- a/libs/langchain/langchain/retrievers/wikipedia.py +++ b/libs/langchain/langchain/retrievers/wikipedia.py @@ -1,6 +1,7 @@ from typing import List -from langchain_core.schema import BaseRetriever, Document +from 
langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.utilities.wikipedia import WikipediaAPIWrapper diff --git a/libs/langchain/langchain/retrievers/you.py b/libs/langchain/langchain/retrievers/you.py index e0bb437e0f633..07c2625cd2ec8 100644 --- a/libs/langchain/langchain/retrievers/you.py +++ b/libs/langchain/langchain/retrievers/you.py @@ -1,7 +1,8 @@ from typing import Any, Dict, List, Optional +from langchain_core.documents import Document from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import BaseRetriever, Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/retrievers/zep.py b/libs/langchain/langchain/retrievers/zep.py index 88a6a5f503761..f5aa4c072d521 100644 --- a/libs/langchain/langchain/retrievers/zep.py +++ b/libs/langchain/langchain/retrievers/zep.py @@ -3,8 +3,9 @@ from enum import Enum from typing import TYPE_CHECKING, Any, Dict, List, Optional +from langchain_core.documents import Document from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import BaseRetriever, Document +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/langchain/retrievers/zilliz.py b/libs/langchain/langchain/retrievers/zilliz.py index f144ca4cc2b06..d14b85f840605 100644 --- a/libs/langchain/langchain/retrievers/zilliz.py +++ b/libs/langchain/langchain/retrievers/zilliz.py @@ -1,9 +1,10 @@ import warnings from typing import Any, Dict, List, Optional +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import BaseRetriever, Document -from langchain_core.schema.embeddings import Embeddings +from langchain_core.retrievers import BaseRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.vectorstores.zilliz import Zilliz diff --git a/libs/langchain/langchain/runnables/openai_functions.py b/libs/langchain/langchain/runnables/openai_functions.py index f03d76b6495b8..2c31dd4d3a73a 100644 --- a/libs/langchain/langchain/runnables/openai_functions.py +++ b/libs/langchain/langchain/runnables/openai_functions.py @@ -1,9 +1,9 @@ from operator import itemgetter from typing import Any, Callable, List, Mapping, Optional, Union +from langchain_core.messages import BaseMessage from langchain_core.runnables import RouterRunnable, Runnable from langchain_core.runnables.base import RunnableBindingBase -from langchain_core.schema.messages import BaseMessage from typing_extensions import TypedDict from langchain.output_parsers.openai_functions import JsonOutputFunctionsParser diff --git a/libs/langchain/langchain/schema/__init__.py b/libs/langchain/langchain/schema/__init__.py index 7e1742cc32796..59eead3ba3f36 100644 --- a/libs/langchain/langchain/schema/__init__.py +++ b/libs/langchain/langchain/schema/__init__.py @@ -1,11 +1,12 @@ """**Schemas** are the LangChain Base Classes and Interfaces.""" -from langchain_core.schema.agent import AgentAction, AgentFinish -from langchain_core.schema.cache import BaseCache -from langchain_core.schema.chat_history import BaseChatMessageHistory -from 
langchain_core.schema.document import BaseDocumentTransformer, Document -from langchain_core.schema.exceptions import LangChainException -from langchain_core.schema.memory import BaseMemory -from langchain_core.schema.messages import ( +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.caches import BaseCache +from langchain_core.chat_history import BaseChatMessageHistory +from langchain_core.document_transformers import BaseDocumentTransformer +from langchain_core.documents import Document +from langchain_core.exceptions import LangChainException, OutputParserException +from langchain_core.memory import BaseMemory +from langchain_core.messages import ( AIMessage, BaseMessage, ChatMessage, @@ -13,28 +14,26 @@ HumanMessage, SystemMessage, _message_from_dict, - _message_to_dict, get_buffer_string, messages_from_dict, messages_to_dict, ) -from langchain_core.schema.output import ( +from langchain_core.messages.base import message_to_dict +from langchain_core.output_parsers import ( + BaseLLMOutputParser, + BaseOutputParser, + StrOutputParser, +) +from langchain_core.outputs import ( ChatGeneration, ChatResult, Generation, LLMResult, RunInfo, ) -from langchain_core.schema.output_parser import ( - BaseLLMOutputParser, - BaseOutputParser, - OutputParserException, - StrOutputParser, -) -from langchain_core.schema.prompt import PromptValue -from langchain_core.schema.prompt_template import BasePromptTemplate, format_document -from langchain_core.schema.retriever import BaseRetriever -from langchain_core.schema.storage import BaseStore +from langchain_core.prompts import BasePromptTemplate, PromptValue, format_document +from langchain_core.retrievers import BaseRetriever +from langchain_core.stores import BaseStore RUN_KEY = "__run" Memory = BaseMemory @@ -56,7 +55,7 @@ "SystemMessage", "messages_from_dict", "messages_to_dict", - "_message_to_dict", + "message_to_dict", "_message_from_dict", "get_buffer_string", "RunInfo", diff --git a/libs/langchain/langchain/schema/agent.py b/libs/langchain/langchain/schema/agent.py index f5064f91811e1..498a4aea7c542 100644 --- a/libs/langchain/langchain/schema/agent.py +++ b/libs/langchain/langchain/schema/agent.py @@ -1,3 +1,3 @@ -from langchain_core.schema.agent import AgentAction, AgentActionMessageLog, AgentFinish +from langchain_core.agents import AgentAction, AgentActionMessageLog, AgentFinish __all__ = ["AgentAction", "AgentActionMessageLog", "AgentFinish"] diff --git a/libs/langchain/langchain/schema/cache.py b/libs/langchain/langchain/schema/cache.py index 145b1674354c8..7a510f0617868 100644 --- a/libs/langchain/langchain/schema/cache.py +++ b/libs/langchain/langchain/schema/cache.py @@ -1,3 +1,3 @@ -from langchain_core.schema.cache import BaseCache +from langchain_core.caches import BaseCache __all__ = ["BaseCache"] diff --git a/libs/langchain/langchain/schema/callbacks/tracers/base.py b/libs/langchain/langchain/schema/callbacks/tracers/base.py index 8f9e3d61578b4..ce38a8bcf3be6 100644 --- a/libs/langchain/langchain/schema/callbacks/tracers/base.py +++ b/libs/langchain/langchain/schema/callbacks/tracers/base.py @@ -1,3 +1,3 @@ -from langchain_core.callbacks.tracers.base import BaseTracer, TracerException +from langchain_core.tracers.base import BaseTracer, TracerException __all__ = ["TracerException", "BaseTracer"] diff --git a/libs/langchain/langchain/schema/callbacks/tracers/evaluation.py b/libs/langchain/langchain/schema/callbacks/tracers/evaluation.py index c847c53d285c4..363c74fcc85e7 100644 --- 
a/libs/langchain/langchain/schema/callbacks/tracers/evaluation.py +++ b/libs/langchain/langchain/schema/callbacks/tracers/evaluation.py @@ -1,4 +1,4 @@ -from langchain_core.callbacks.tracers.evaluation import ( +from langchain_core.tracers.evaluation import ( EvaluatorCallbackHandler, wait_for_all_evaluators, ) diff --git a/libs/langchain/langchain/schema/callbacks/tracers/langchain.py b/libs/langchain/langchain/schema/callbacks/tracers/langchain.py index 284ae533f9a5e..8415dfab94533 100644 --- a/libs/langchain/langchain/schema/callbacks/tracers/langchain.py +++ b/libs/langchain/langchain/schema/callbacks/tracers/langchain.py @@ -1,4 +1,4 @@ -from langchain_core.callbacks.tracers.langchain import ( +from langchain_core.tracers.langchain import ( LangChainTracer, get_client, log_error_once, diff --git a/libs/langchain/langchain/schema/callbacks/tracers/langchain_v1.py b/libs/langchain/langchain/schema/callbacks/tracers/langchain_v1.py index 96154af452b0d..fca2d7590f132 100644 --- a/libs/langchain/langchain/schema/callbacks/tracers/langchain_v1.py +++ b/libs/langchain/langchain/schema/callbacks/tracers/langchain_v1.py @@ -1,3 +1,3 @@ -from langchain_core.callbacks.tracers.langchain_v1 import LangChainTracerV1, get_headers +from langchain_core.tracers.langchain_v1 import LangChainTracerV1, get_headers __all__ = ["get_headers", "LangChainTracerV1"] diff --git a/libs/langchain/langchain/schema/callbacks/tracers/log_stream.py b/libs/langchain/langchain/schema/callbacks/tracers/log_stream.py index e7e29ba69cc74..ef3dd946d6626 100644 --- a/libs/langchain/langchain/schema/callbacks/tracers/log_stream.py +++ b/libs/langchain/langchain/schema/callbacks/tracers/log_stream.py @@ -1,4 +1,4 @@ -from langchain_core.callbacks.tracers.log_stream import ( +from langchain_core.tracers.log_stream import ( LogEntry, LogStreamCallbackHandler, RunLog, diff --git a/libs/langchain/langchain/schema/callbacks/tracers/root_listeners.py b/libs/langchain/langchain/schema/callbacks/tracers/root_listeners.py index f57b31c938d3e..0dee9bce2d233 100644 --- a/libs/langchain/langchain/schema/callbacks/tracers/root_listeners.py +++ b/libs/langchain/langchain/schema/callbacks/tracers/root_listeners.py @@ -1,3 +1,3 @@ -from langchain_core.callbacks.tracers.root_listeners import RootListenersTracer +from langchain_core.tracers.root_listeners import RootListenersTracer __all__ = ["RootListenersTracer"] diff --git a/libs/langchain/langchain/schema/callbacks/tracers/run_collector.py b/libs/langchain/langchain/schema/callbacks/tracers/run_collector.py index 1e872946631ee..1240026bfb61e 100644 --- a/libs/langchain/langchain/schema/callbacks/tracers/run_collector.py +++ b/libs/langchain/langchain/schema/callbacks/tracers/run_collector.py @@ -1,3 +1,3 @@ -from langchain_core.callbacks.tracers.run_collector import RunCollectorCallbackHandler +from langchain_core.tracers.run_collector import RunCollectorCallbackHandler __all__ = ["RunCollectorCallbackHandler"] diff --git a/libs/langchain/langchain/schema/callbacks/tracers/schemas.py b/libs/langchain/langchain/schema/callbacks/tracers/schemas.py index 87e0f35b4a932..6fb49dbf72489 100644 --- a/libs/langchain/langchain/schema/callbacks/tracers/schemas.py +++ b/libs/langchain/langchain/schema/callbacks/tracers/schemas.py @@ -1,4 +1,4 @@ -from langchain_core.callbacks.tracers.schemas import ( +from langchain_core.tracers.schemas import ( BaseRun, ChainRun, LLMRun, diff --git a/libs/langchain/langchain/schema/callbacks/tracers/stdout.py 
b/libs/langchain/langchain/schema/callbacks/tracers/stdout.py index 1bb931a21e4ee..b2ef6fdcfecca 100644 --- a/libs/langchain/langchain/schema/callbacks/tracers/stdout.py +++ b/libs/langchain/langchain/schema/callbacks/tracers/stdout.py @@ -1,4 +1,4 @@ -from langchain_core.callbacks.tracers.stdout import ( +from langchain_core.tracers.stdout import ( ConsoleCallbackHandler, FunctionCallbackHandler, elapsed, diff --git a/libs/langchain/langchain/schema/chat.py b/libs/langchain/langchain/schema/chat.py index 0dfc5dd876c1b..1f9080f8f372a 100644 --- a/libs/langchain/langchain/schema/chat.py +++ b/libs/langchain/langchain/schema/chat.py @@ -1,3 +1,3 @@ -from langchain_core.schema.chat import ChatSession +from langchain_core.chat_sessions import ChatSession __all__ = ["ChatSession"] diff --git a/libs/langchain/langchain/schema/chat_history.py b/libs/langchain/langchain/schema/chat_history.py index 0321f0216720f..08dcffe47ec60 100644 --- a/libs/langchain/langchain/schema/chat_history.py +++ b/libs/langchain/langchain/schema/chat_history.py @@ -1,3 +1,3 @@ -from langchain_core.schema.chat_history import BaseChatMessageHistory +from langchain_core.chat_history import BaseChatMessageHistory __all__ = ["BaseChatMessageHistory"] diff --git a/libs/langchain/langchain/schema/document.py b/libs/langchain/langchain/schema/document.py index 97ee9844f588e..8b5b254be953a 100644 --- a/libs/langchain/langchain/schema/document.py +++ b/libs/langchain/langchain/schema/document.py @@ -1,3 +1,3 @@ -from langchain_core.schema.document import BaseDocumentTransformer, Document +from langchain_core.documents import BaseDocumentTransformer, Document __all__ = ["Document", "BaseDocumentTransformer"] diff --git a/libs/langchain/langchain/schema/embeddings.py b/libs/langchain/langchain/schema/embeddings.py index a5ada340d9dfb..dd38b6276e1b8 100644 --- a/libs/langchain/langchain/schema/embeddings.py +++ b/libs/langchain/langchain/schema/embeddings.py @@ -1,3 +1,3 @@ -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings __all__ = ["Embeddings"] diff --git a/libs/langchain/langchain/schema/exceptions.py b/libs/langchain/langchain/schema/exceptions.py index be1ad0b900c97..a26216c688b51 100644 --- a/libs/langchain/langchain/schema/exceptions.py +++ b/libs/langchain/langchain/schema/exceptions.py @@ -1,3 +1,3 @@ -from langchain_core.schema.exceptions import LangChainException +from langchain_core.exceptions import LangChainException __all__ = ["LangChainException"] diff --git a/libs/langchain/langchain/schema/language_model.py b/libs/langchain/langchain/schema/language_model.py index 4f7ea44926a33..f03ee8279f2d5 100644 --- a/libs/langchain/langchain/schema/language_model.py +++ b/libs/langchain/langchain/schema/language_model.py @@ -1,3 +1,3 @@ -from langchain_core.schema.language_model import BaseLanguageModel, get_tokenizer +from langchain_core.language_models import BaseLanguageModel, get_tokenizer __all__ = ["get_tokenizer", "BaseLanguageModel"] diff --git a/libs/langchain/langchain/schema/memory.py b/libs/langchain/langchain/schema/memory.py index 325840af67f2c..d2f3d73e6138e 100644 --- a/libs/langchain/langchain/schema/memory.py +++ b/libs/langchain/langchain/schema/memory.py @@ -1,3 +1,3 @@ -from langchain_core.schema.memory import BaseMemory +from langchain_core.memory import BaseMemory __all__ = ["BaseMemory"] diff --git a/libs/langchain/langchain/schema/messages.py b/libs/langchain/langchain/schema/messages.py index 8d23953bb3a80..7aa60748ecc09 100644 --- 
a/libs/langchain/langchain/schema/messages.py +++ b/libs/langchain/langchain/schema/messages.py @@ -1,4 +1,4 @@ -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, AIMessageChunk, BaseMessage, diff --git a/libs/langchain/langchain/schema/output.py b/libs/langchain/langchain/schema/output.py index 7ed8ecc0dcfc5..970986df560de 100644 --- a/libs/langchain/langchain/schema/output.py +++ b/libs/langchain/langchain/schema/output.py @@ -1,4 +1,4 @@ -from langchain_core.schema.output import ( +from langchain_core.outputs import ( ChatGeneration, ChatGenerationChunk, ChatResult, diff --git a/libs/langchain/langchain/schema/output_parser.py b/libs/langchain/langchain/schema/output_parser.py index 3702e9ea168da..c29648059c54c 100644 --- a/libs/langchain/langchain/schema/output_parser.py +++ b/libs/langchain/langchain/schema/output_parser.py @@ -1,4 +1,4 @@ -from langchain_core.schema.output_parser import ( +from langchain_core.output_parsers import ( BaseCumulativeTransformOutputParser, BaseGenerationOutputParser, BaseLLMOutputParser, diff --git a/libs/langchain/langchain/schema/prompt.py b/libs/langchain/langchain/schema/prompt.py index 07a61bf1cb942..25564c2f07631 100644 --- a/libs/langchain/langchain/schema/prompt.py +++ b/libs/langchain/langchain/schema/prompt.py @@ -1,3 +1,3 @@ -from langchain_core.schema.prompt import PromptValue +from langchain_core.prompts import PromptValue __all__ = ["PromptValue"] diff --git a/libs/langchain/langchain/schema/prompt_template.py b/libs/langchain/langchain/schema/prompt_template.py index a2dbb4fce55cf..49a3595b036dc 100644 --- a/libs/langchain/langchain/schema/prompt_template.py +++ b/libs/langchain/langchain/schema/prompt_template.py @@ -1,3 +1,3 @@ -from langchain_core.schema.prompt_template import BasePromptTemplate, format_document +from langchain_core.prompts import BasePromptTemplate, format_document __all__ = ["BasePromptTemplate", "format_document"] diff --git a/libs/langchain/langchain/schema/retriever.py b/libs/langchain/langchain/schema/retriever.py index b58b13ef74931..ca795d341dd41 100644 --- a/libs/langchain/langchain/schema/retriever.py +++ b/libs/langchain/langchain/schema/retriever.py @@ -1,3 +1,3 @@ -from langchain_core.schema.retriever import BaseRetriever +from langchain_core.retrievers import BaseRetriever __all__ = ["BaseRetriever"] diff --git a/libs/langchain/langchain/schema/storage.py b/libs/langchain/langchain/schema/storage.py index 7ed3443be8d43..1e64829655b8a 100644 --- a/libs/langchain/langchain/schema/storage.py +++ b/libs/langchain/langchain/schema/storage.py @@ -1,3 +1,3 @@ -from langchain_core.schema.storage import BaseStore +from langchain_core.stores import BaseStore __all__ = ["BaseStore"] diff --git a/libs/langchain/langchain/schema/vectorstore.py b/libs/langchain/langchain/schema/vectorstore.py index 5be4e018853a3..59a719b541eea 100644 --- a/libs/langchain/langchain/schema/vectorstore.py +++ b/libs/langchain/langchain/schema/vectorstore.py @@ -1,3 +1,3 @@ -from langchain_core.schema.vectorstore import VectorStore, VectorStoreRetriever +from langchain_core.vectorstores import VectorStore, VectorStoreRetriever __all__ = ["VectorStore", "VectorStoreRetriever"] diff --git a/libs/langchain/langchain/smith/evaluation/config.py b/libs/langchain/langchain/smith/evaluation/config.py index 95483cce516ba..4db1f0c512b83 100644 --- a/libs/langchain/langchain/smith/evaluation/config.py +++ b/libs/langchain/langchain/smith/evaluation/config.py @@ -2,10 +2,10 @@ from typing import Any, Dict, 
List, Optional, Union +from langchain_core.embeddings import Embeddings +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts import BasePromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.prompt_template import BasePromptTemplate from langsmith import RunEvaluator from langchain.evaluation.criteria.eval_chain import CRITERIA_TYPE diff --git a/libs/langchain/langchain/smith/evaluation/progress.py b/libs/langchain/langchain/smith/evaluation/progress.py index 4471ed1828972..bf25bec3fbf96 100644 --- a/libs/langchain/langchain/smith/evaluation/progress.py +++ b/libs/langchain/langchain/smith/evaluation/progress.py @@ -3,8 +3,8 @@ from typing import Any, Dict, Optional, Sequence from uuid import UUID -from langchain_core.schema.document import Document -from langchain_core.schema.output import LLMResult +from langchain_core.documents import Document +from langchain_core.outputs import LLMResult from langchain.callbacks import base as base_callbacks diff --git a/libs/langchain/langchain/smith/evaluation/runner_utils.py b/libs/langchain/langchain/smith/evaluation/runner_utils.py index 5caa28b71b552..a3284cd2e5878 100644 --- a/libs/langchain/langchain/smith/evaluation/runner_utils.py +++ b/libs/langchain/langchain/smith/evaluation/runner_utils.py @@ -21,12 +21,17 @@ ) from langchain_core._api import warn_deprecated +from langchain_core.language_models import BaseLanguageModel +from langchain_core.messages import BaseMessage, messages_from_dict +from langchain_core.outputs import ChatResult, LLMResult from langchain_core.runnables import Runnable, RunnableConfig, RunnableLambda from langchain_core.runnables import config as runnable_config from langchain_core.runnables import utils as runnable_utils -from langchain_core.schema import ChatResult, LLMResult -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.messages import BaseMessage, messages_from_dict +from langchain_core.tracers.evaluation import ( + EvaluatorCallbackHandler, + wait_for_all_evaluators, +) +from langchain_core.tracers.langchain import LangChainTracer from langsmith.client import Client from langsmith.evaluation import RunEvaluator from langsmith.run_helpers import as_runnable, is_traceable_function @@ -35,11 +40,6 @@ from requests import HTTPError from langchain.callbacks.manager import Callbacks -from langchain.callbacks.tracers.evaluation import ( - EvaluatorCallbackHandler, - wait_for_all_evaluators, -) -from langchain.callbacks.tracers.langchain import LangChainTracer from langchain.chains.base import Chain from langchain.evaluation.loading import load_evaluator from langchain.evaluation.schema import ( diff --git a/libs/langchain/langchain/smith/evaluation/string_run_evaluator.py b/libs/langchain/langchain/smith/evaluation/string_run_evaluator.py index 6c9a362a7f603..64bd51bb9f2b3 100644 --- a/libs/langchain/langchain/smith/evaluation/string_run_evaluator.py +++ b/libs/langchain/langchain/smith/evaluation/string_run_evaluator.py @@ -7,8 +7,7 @@ from langchain_core.load.dump import dumpd from langchain_core.load.load import load from langchain_core.load.serializable import Serializable -from langchain_core.schema import RUN_KEY, messages_from_dict -from langchain_core.schema.messages import BaseMessage, get_buffer_string +from langchain_core.messages import BaseMessage, 
get_buffer_string, messages_from_dict from langsmith import EvaluationResult, RunEvaluator from langsmith.schemas import DataType, Example, Run @@ -18,6 +17,7 @@ ) from langchain.chains.base import Chain from langchain.evaluation.schema import StringEvaluator +from langchain.schema import RUN_KEY def _get_messages_from_run_dict(messages: List[dict]) -> List[BaseMessage]: diff --git a/libs/langchain/langchain/storage/_lc_store.py b/libs/langchain/langchain/storage/_lc_store.py index 73916b9ded2d9..c38f66cb72e6b 100644 --- a/libs/langchain/langchain/storage/_lc_store.py +++ b/libs/langchain/langchain/storage/_lc_store.py @@ -1,10 +1,9 @@ """Create a key-value store for any langchain serializable object.""" from typing import Callable, Optional -from langchain_core.load.dump import dumps -from langchain_core.load.load import loads -from langchain_core.load.serializable import Serializable -from langchain_core.schema import BaseStore, Document +from langchain_core.documents import Document +from langchain_core.load import Serializable, dumps, loads +from langchain_core.stores import BaseStore from langchain.storage.encoder_backed import EncoderBackedStore diff --git a/libs/langchain/langchain/storage/encoder_backed.py b/libs/langchain/langchain/storage/encoder_backed.py index 026d76f4dd408..fcb90226083be 100644 --- a/libs/langchain/langchain/storage/encoder_backed.py +++ b/libs/langchain/langchain/storage/encoder_backed.py @@ -10,7 +10,7 @@ Union, ) -from langchain_core.schema import BaseStore +from langchain_core.stores import BaseStore K = TypeVar("K") V = TypeVar("V") diff --git a/libs/langchain/langchain/storage/exceptions.py b/libs/langchain/langchain/storage/exceptions.py index fedc9c7bf87cd..d7231de65c45c 100644 --- a/libs/langchain/langchain/storage/exceptions.py +++ b/libs/langchain/langchain/storage/exceptions.py @@ -1,4 +1,4 @@ -from langchain_core.schema import LangChainException +from langchain_core.exceptions import LangChainException class InvalidKeyException(LangChainException): diff --git a/libs/langchain/langchain/storage/file_system.py b/libs/langchain/langchain/storage/file_system.py index 3cead7e0236f2..dadd3720bda0b 100644 --- a/libs/langchain/langchain/storage/file_system.py +++ b/libs/langchain/langchain/storage/file_system.py @@ -2,7 +2,7 @@ from pathlib import Path from typing import Iterator, List, Optional, Sequence, Tuple, Union -from langchain_core.schema import BaseStore +from langchain_core.stores import BaseStore from langchain.storage.exceptions import InvalidKeyException diff --git a/libs/langchain/langchain/storage/in_memory.py b/libs/langchain/langchain/storage/in_memory.py index 48e014e839d48..60d8ad5516c3e 100644 --- a/libs/langchain/langchain/storage/in_memory.py +++ b/libs/langchain/langchain/storage/in_memory.py @@ -5,7 +5,7 @@ """ from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple -from langchain_core.schema import BaseStore +from langchain_core.stores import BaseStore class InMemoryStore(BaseStore[str, Any]): diff --git a/libs/langchain/langchain/storage/redis.py b/libs/langchain/langchain/storage/redis.py index 6b5efbbc5e18b..d213f8cc051cb 100644 --- a/libs/langchain/langchain/storage/redis.py +++ b/libs/langchain/langchain/storage/redis.py @@ -1,6 +1,6 @@ from typing import Any, Iterator, List, Optional, Sequence, Tuple, cast -from langchain_core.schema import BaseStore +from langchain_core.stores import BaseStore from langchain.utilities.redis import get_client diff --git a/libs/langchain/langchain/storage/upstash_redis.py 
b/libs/langchain/langchain/storage/upstash_redis.py index 194982203bd80..193b8c7c4b5c3 100644 --- a/libs/langchain/langchain/storage/upstash_redis.py +++ b/libs/langchain/langchain/storage/upstash_redis.py @@ -1,6 +1,6 @@ from typing import Any, Iterator, List, Optional, Sequence, Tuple, cast -from langchain_core.schema import BaseStore +from langchain_core.stores import BaseStore class UpstashRedisStore(BaseStore[str, str]): diff --git a/libs/langchain/langchain/text_splitter.py b/libs/langchain/langchain/text_splitter.py index 095693e3daec6..856cd9417cab3 100644 --- a/libs/langchain/langchain/text_splitter.py +++ b/libs/langchain/langchain/text_splitter.py @@ -51,7 +51,7 @@ ) import requests -from langchain_core.schema import BaseDocumentTransformer +from langchain_core.document_transformers import BaseDocumentTransformer from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/tools/base.py b/libs/langchain/langchain/tools/base.py index ca8323e182548..ff81eaa895620 100644 --- a/libs/langchain/langchain/tools/base.py +++ b/libs/langchain/langchain/tools/base.py @@ -1,4 +1,4 @@ -from langchain_core.tool import ( +from langchain_core.tools import ( BaseTool, SchemaAnnotationError, StructuredTool, diff --git a/libs/langchain/langchain/tools/retriever.py b/libs/langchain/langchain/tools/retriever.py index 96517cbb43a02..1f613cf44fab6 100644 --- a/libs/langchain/langchain/tools/retriever.py +++ b/libs/langchain/langchain/tools/retriever.py @@ -1,5 +1,5 @@ from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema import BaseRetriever +from langchain_core.retrievers import BaseRetriever from langchain.tools import Tool diff --git a/libs/langchain/langchain/tools/spark_sql/tool.py b/libs/langchain/langchain/tools/spark_sql/tool.py index ccb15e1f440f7..5a79fd45be516 100644 --- a/libs/langchain/langchain/tools/spark_sql/tool.py +++ b/libs/langchain/langchain/tools/spark_sql/tool.py @@ -4,7 +4,7 @@ from langchain_core.pydantic_v1 import BaseModel, Field, root_validator -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, diff --git a/libs/langchain/langchain/tools/sql_database/tool.py b/libs/langchain/langchain/tools/sql_database/tool.py index 8606c28ff242f..c90186cfc6441 100644 --- a/libs/langchain/langchain/tools/sql_database/tool.py +++ b/libs/langchain/langchain/tools/sql_database/tool.py @@ -4,7 +4,7 @@ from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, diff --git a/libs/langchain/langchain/tools/vectorstore/tool.py b/libs/langchain/langchain/tools/vectorstore/tool.py index e0192243dd754..c55bc3770ea9f 100644 --- a/libs/langchain/langchain/tools/vectorstore/tool.py +++ b/libs/langchain/langchain/tools/vectorstore/tool.py @@ -3,9 +3,9 @@ import json from typing import Any, Dict, Optional +from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema.language_model import BaseLanguageModel -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore from 
langchain.callbacks.manager import CallbackManagerForToolRun from langchain.llms.openai import OpenAI diff --git a/libs/langchain/langchain/tools/yahoo_finance_news.py b/libs/langchain/langchain/tools/yahoo_finance_news.py index ee63f416d5a8d..c7867f8135fbc 100644 --- a/libs/langchain/langchain/tools/yahoo_finance_news.py +++ b/libs/langchain/langchain/tools/yahoo_finance_news.py @@ -1,6 +1,6 @@ from typing import Iterable, Optional -from langchain_core.schema import Document +from langchain_core.documents import Document from requests.exceptions import HTTPError, ReadTimeout from urllib3.exceptions import ConnectionError diff --git a/libs/langchain/langchain/utilities/apify.py b/libs/langchain/langchain/utilities/apify.py index 7a2d04680f898..bb6dc9ca7a157 100644 --- a/libs/langchain/langchain/utilities/apify.py +++ b/libs/langchain/langchain/utilities/apify.py @@ -1,7 +1,7 @@ from typing import TYPE_CHECKING, Any, Callable, Dict, Optional +from langchain_core.documents import Document from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema.document import Document from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/utilities/arcee.py b/libs/langchain/langchain/utilities/arcee.py index 844f2c1591075..318af14eb5758 100644 --- a/libs/langchain/langchain/utilities/arcee.py +++ b/libs/langchain/langchain/utilities/arcee.py @@ -7,7 +7,7 @@ import requests from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema.retriever import Document +from langchain_core.retrievers import Document class ArceeRoute(str, Enum): diff --git a/libs/langchain/langchain/utilities/arxiv.py b/libs/langchain/langchain/utilities/arxiv.py index f74d3d4f9eedb..b57b37154e13c 100644 --- a/libs/langchain/langchain/utilities/arxiv.py +++ b/libs/langchain/langchain/utilities/arxiv.py @@ -4,8 +4,8 @@ import re from typing import Any, Dict, List, Optional +from langchain_core.documents import Document from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema import Document logger = logging.getLogger(__name__) diff --git a/libs/langchain/langchain/utilities/brave_search.py b/libs/langchain/langchain/utilities/brave_search.py index 91ab8e8bbbc2c..8f3df0666c6af 100644 --- a/libs/langchain/langchain/utilities/brave_search.py +++ b/libs/langchain/langchain/utilities/brave_search.py @@ -2,8 +2,8 @@ from typing import List import requests +from langchain_core.documents import Document from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema import Document class BraveSearchWrapper(BaseModel): diff --git a/libs/langchain/langchain/utilities/pubmed.py b/libs/langchain/langchain/utilities/pubmed.py index 6a2e78abb038b..981799d9de3a3 100644 --- a/libs/langchain/langchain/utilities/pubmed.py +++ b/libs/langchain/langchain/utilities/pubmed.py @@ -6,8 +6,8 @@ import urllib.request from typing import Any, Dict, Iterator, List +from langchain_core.documents import Document from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema import Document logger = logging.getLogger(__name__) diff --git a/libs/langchain/langchain/utilities/tensorflow_datasets.py b/libs/langchain/langchain/utilities/tensorflow_datasets.py index cc5b2fdf89469..0a7cb35f88f8e 100644 --- a/libs/langchain/langchain/utilities/tensorflow_datasets.py +++ b/libs/langchain/langchain/utilities/tensorflow_datasets.py @@ -1,8 +1,8 @@ import logging from typing import Any, Callable, 
Dict, Iterator, List, Optional +from langchain_core.documents import Document from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema import Document logger = logging.getLogger(__name__) diff --git a/libs/langchain/langchain/utilities/wikipedia.py b/libs/langchain/langchain/utilities/wikipedia.py index 6df84925bda46..c1f53e7a3d76e 100644 --- a/libs/langchain/langchain/utilities/wikipedia.py +++ b/libs/langchain/langchain/utilities/wikipedia.py @@ -2,8 +2,8 @@ import logging from typing import Any, Dict, List, Optional +from langchain_core.documents import Document from langchain_core.pydantic_v1 import BaseModel, root_validator -from langchain_core.schema import Document logger = logging.getLogger(__name__) diff --git a/libs/langchain/langchain/vectorstores/__init__.py b/libs/langchain/langchain/vectorstores/__init__.py index d91d125d0b126..863c702498509 100644 --- a/libs/langchain/langchain/vectorstores/__init__.py +++ b/libs/langchain/langchain/vectorstores/__init__.py @@ -21,7 +21,7 @@ from typing import Any -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore def _import_alibaba_cloud_open_search() -> Any: diff --git a/libs/langchain/langchain/vectorstores/alibabacloud_opensearch.py b/libs/langchain/langchain/vectorstores/alibabacloud_opensearch.py index f1ed358684baa..c9d5b85a6bd4c 100644 --- a/libs/langchain/langchain/vectorstores/alibabacloud_opensearch.py +++ b/libs/langchain/langchain/vectorstores/alibabacloud_opensearch.py @@ -4,9 +4,9 @@ from hashlib import sha1 from typing import Any, Dict, Iterable, List, Optional, Tuple -from langchain_core.schema import Document -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore logger = logging.getLogger() diff --git a/libs/langchain/langchain/vectorstores/analyticdb.py b/libs/langchain/langchain/vectorstores/analyticdb.py index c27ee0ac98e00..335b680154ab3 100644 --- a/libs/langchain/langchain/vectorstores/analyticdb.py +++ b/libs/langchain/langchain/vectorstores/analyticdb.py @@ -12,8 +12,8 @@ except ImportError: from sqlalchemy.ext.declarative import declarative_base -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/vectorstores/annoy.py b/libs/langchain/langchain/vectorstores/annoy.py index 4054285a43bd9..ea0332b854084 100644 --- a/libs/langchain/langchain/vectorstores/annoy.py +++ b/libs/langchain/langchain/vectorstores/annoy.py @@ -8,8 +8,8 @@ from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.base import Docstore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/astradb.py b/libs/langchain/langchain/vectorstores/astradb.py index f9a428ea9c4ab..156b13245ef72 100644 --- 
a/libs/langchain/langchain/vectorstores/astradb.py +++ b/libs/langchain/langchain/vectorstores/astradb.py @@ -17,9 +17,9 @@ ) import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings from langchain_core.utils.iter import batch_iterate +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/atlas.py b/libs/langchain/langchain/vectorstores/atlas.py index 230a123a7b51d..96de8a90b0782 100644 --- a/libs/langchain/langchain/vectorstores/atlas.py +++ b/libs/langchain/langchain/vectorstores/atlas.py @@ -5,8 +5,8 @@ from typing import Any, Iterable, List, Optional, Type import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/awadb.py b/libs/langchain/langchain/vectorstores/awadb.py index 79cf3d072ea27..ed4685016abb0 100644 --- a/libs/langchain/langchain/vectorstores/awadb.py +++ b/libs/langchain/langchain/vectorstores/awadb.py @@ -5,8 +5,8 @@ from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Set, Tuple, Type import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/azure_cosmos_db.py b/libs/langchain/langchain/vectorstores/azure_cosmos_db.py index 4002eaf4db0e0..91a3536fe3bc1 100644 --- a/libs/langchain/langchain/vectorstores/azure_cosmos_db.py +++ b/libs/langchain/langchain/vectorstores/azure_cosmos_db.py @@ -22,7 +22,7 @@ from langchain.vectorstores.utils import maximal_marginal_relevance if TYPE_CHECKING: - from langchain_core.schema.embeddings import Embeddings + from langchain_core.embeddings import Embeddings from pymongo.collection import Collection diff --git a/libs/langchain/langchain/vectorstores/azuresearch.py b/libs/langchain/langchain/vectorstores/azuresearch.py index 4fbd766bc8473..d65a84b24a3c5 100644 --- a/libs/langchain/langchain/vectorstores/azuresearch.py +++ b/libs/langchain/langchain/vectorstores/azuresearch.py @@ -17,10 +17,10 @@ ) import numpy as np +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import root_validator -from langchain_core.schema import BaseRetriever -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.retrievers import BaseRetriever +from langchain_core.vectorstores import VectorStore from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/langchain/vectorstores/bageldb.py b/libs/langchain/langchain/vectorstores/bageldb.py index 870673a12dd8d..9d298ebd29b36 100644 --- a/libs/langchain/langchain/vectorstores/bageldb.py +++ b/libs/langchain/langchain/vectorstores/bageldb.py @@ -18,9 +18,9 @@ import bagel.config from bagel.api.types import ID, OneOrMany, Where, 
WhereDocument -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings from langchain_core.utils import xor_args +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/baiducloud_vector_search.py b/libs/langchain/langchain/vectorstores/baiducloud_vector_search.py index a66b1104486db..f56250d897343 100644 --- a/libs/langchain/langchain/vectorstores/baiducloud_vector_search.py +++ b/libs/langchain/langchain/vectorstores/baiducloud_vector_search.py @@ -12,8 +12,8 @@ Union, ) -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/base.py b/libs/langchain/langchain/vectorstores/base.py index 5be4e018853a3..59a719b541eea 100644 --- a/libs/langchain/langchain/vectorstores/base.py +++ b/libs/langchain/langchain/vectorstores/base.py @@ -1,3 +1,3 @@ -from langchain_core.schema.vectorstore import VectorStore, VectorStoreRetriever +from langchain_core.vectorstores import VectorStore, VectorStoreRetriever __all__ = ["VectorStore", "VectorStoreRetriever"] diff --git a/libs/langchain/langchain/vectorstores/cassandra.py b/libs/langchain/langchain/vectorstores/cassandra.py index 194a89f2783f9..e64dd4971bf4a 100644 --- a/libs/langchain/langchain/vectorstores/cassandra.py +++ b/libs/langchain/langchain/vectorstores/cassandra.py @@ -20,8 +20,8 @@ if typing.TYPE_CHECKING: from cassandra.cluster import Session -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/chroma.py b/libs/langchain/langchain/vectorstores/chroma.py index 476712b0783df..d086b881a74f9 100644 --- a/libs/langchain/langchain/vectorstores/chroma.py +++ b/libs/langchain/langchain/vectorstores/chroma.py @@ -16,9 +16,9 @@ ) import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings from langchain_core.utils import xor_args +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/clarifai.py b/libs/langchain/langchain/vectorstores/clarifai.py index 132b39c7d5dc7..3b3728bb832fc 100644 --- a/libs/langchain/langchain/vectorstores/clarifai.py +++ b/libs/langchain/langchain/vectorstores/clarifai.py @@ -7,8 +7,8 @@ from typing import Any, Iterable, List, Optional, Tuple import requests -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/clickhouse.py b/libs/langchain/langchain/vectorstores/clickhouse.py index 
d833ef42c0fe3..d8565045758fd 100644 --- a/libs/langchain/langchain/vectorstores/clickhouse.py +++ b/libs/langchain/langchain/vectorstores/clickhouse.py @@ -6,9 +6,9 @@ from threading import Thread from typing import Any, Dict, Iterable, List, Optional, Tuple, Union +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseSettings -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/dashvector.py b/libs/langchain/langchain/vectorstores/dashvector.py index e151798a15d74..4eaed018a96e4 100644 --- a/libs/langchain/langchain/vectorstores/dashvector.py +++ b/libs/langchain/langchain/vectorstores/dashvector.py @@ -11,8 +11,8 @@ ) import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.utils import get_from_env diff --git a/libs/langchain/langchain/vectorstores/deeplake.py b/libs/langchain/langchain/vectorstores/deeplake.py index d76667d6aaad4..bf1b5612cc19a 100644 --- a/libs/langchain/langchain/vectorstores/deeplake.py +++ b/libs/langchain/langchain/vectorstores/deeplake.py @@ -14,8 +14,8 @@ except ImportError: _DEEPLAKE_INSTALLED = False -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/dingo.py b/libs/langchain/langchain/vectorstores/dingo.py index 5e78b956da50a..1059019e58aac 100644 --- a/libs/langchain/langchain/vectorstores/dingo.py +++ b/libs/langchain/langchain/vectorstores/dingo.py @@ -5,8 +5,8 @@ from typing import Any, Iterable, List, Optional, Tuple import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/docarray/base.py b/libs/langchain/langchain/vectorstores/docarray/base.py index 6518e418b889e..09ca68fde0ac2 100644 --- a/libs/langchain/langchain/vectorstores/docarray/base.py +++ b/libs/langchain/langchain/vectorstores/docarray/base.py @@ -2,10 +2,10 @@ from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Tuple, Type import numpy as np +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import Field -from langchain_core.schema import Document -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/docarray/hnsw.py b/libs/langchain/langchain/vectorstores/docarray/hnsw.py index 5fa44dd0eee09..10892059a06b4 
100644 --- a/libs/langchain/langchain/vectorstores/docarray/hnsw.py +++ b/libs/langchain/langchain/vectorstores/docarray/hnsw.py @@ -2,7 +2,7 @@ from typing import Any, List, Literal, Optional -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings from langchain.vectorstores.docarray.base import ( DocArrayIndex, diff --git a/libs/langchain/langchain/vectorstores/docarray/in_memory.py b/libs/langchain/langchain/vectorstores/docarray/in_memory.py index 78346abae9665..7e847ee272938 100644 --- a/libs/langchain/langchain/vectorstores/docarray/in_memory.py +++ b/libs/langchain/langchain/vectorstores/docarray/in_memory.py @@ -3,7 +3,7 @@ from typing import Any, Dict, List, Literal, Optional -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings from langchain.vectorstores.docarray.base import ( DocArrayIndex, diff --git a/libs/langchain/langchain/vectorstores/elastic_vector_search.py b/libs/langchain/langchain/vectorstores/elastic_vector_search.py index dd5ef4aef145a..7b8ae5a42289b 100644 --- a/libs/langchain/langchain/vectorstores/elastic_vector_search.py +++ b/libs/langchain/langchain/vectorstores/elastic_vector_search.py @@ -15,8 +15,8 @@ ) from langchain_core._api import deprecated -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/vectorstores/elasticsearch.py b/libs/langchain/langchain/vectorstores/elasticsearch.py index 4d972381c5f01..7bb01f051ac8d 100644 --- a/libs/langchain/langchain/vectorstores/elasticsearch.py +++ b/libs/langchain/langchain/vectorstores/elasticsearch.py @@ -15,8 +15,8 @@ ) import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import DistanceStrategy, maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/epsilla.py b/libs/langchain/langchain/vectorstores/epsilla.py index bb11305b84f39..0a06f702747b4 100644 --- a/libs/langchain/langchain/vectorstores/epsilla.py +++ b/libs/langchain/langchain/vectorstores/epsilla.py @@ -5,8 +5,8 @@ import uuid from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Type -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/faiss.py b/libs/langchain/langchain/vectorstores/faiss.py index 39430fec17803..7b68c16402383 100644 --- a/libs/langchain/langchain/vectorstores/faiss.py +++ b/libs/langchain/langchain/vectorstores/faiss.py @@ -22,8 +22,8 @@ ) import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.base import AddableMixin, Docstore from langchain.docstore.document import Document 
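For downstream code, the practical effect of the vector-store hunks above is purely an import-path change: class names such as Document, Embeddings, and VectorStore stay the same, only their modules move from langchain_core.schema.* to the new top-level packages. The following is a minimal migration sketch, not part of this diff: the FakeEmbeddings class, its toy embedding logic, and the usage at the bottom are illustrative assumptions, and it assumes the Embeddings interface (embed_documents / embed_query) is unchanged by the refactor.

# Hypothetical migration sketch (not from this PR): the same integration code,
# shown only to illustrate the old vs. new import locations.
from typing import List

# Old locations (pre-refactor), kept as comments for comparison:
#   from langchain_core.schema.document import Document
#   from langchain_core.schema.embeddings import Embeddings
#   from langchain_core.schema.vectorstore import VectorStore

# New locations introduced by the hunks above:
from langchain_core.documents import Document
from langchain_core.embeddings import Embeddings


class FakeEmbeddings(Embeddings):
    """Toy embeddings, assumed here only to show subclasses are untouched by the move."""

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        # One fixed-size vector per input text; length-based stand-in values.
        return [[float(len(t))] for t in texts]

    def embed_query(self, text: str) -> List[float]:
        return [float(len(text))]


if __name__ == "__main__":
    docs = [Document(page_content="hello"), Document(page_content="world")]
    vectors = FakeEmbeddings().embed_documents([d.page_content for d in docs])
    print(vectors)  # [[5.0], [5.0]]

Only the two import lines differ from the pre-refactor version; the class body and call sites are byte-for-byte identical, which is why the hunks in this section touch nothing but import statements.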
diff --git a/libs/langchain/langchain/vectorstores/hippo.py b/libs/langchain/langchain/vectorstores/hippo.py index 59490e480404c..b995617f9567e 100644 --- a/libs/langchain/langchain/vectorstores/hippo.py +++ b/libs/langchain/langchain/vectorstores/hippo.py @@ -3,8 +3,8 @@ import logging from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Tuple -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/hologres.py b/libs/langchain/langchain/vectorstores/hologres.py index 93504adfb41b4..08fd5c46eb0ed 100644 --- a/libs/langchain/langchain/vectorstores/hologres.py +++ b/libs/langchain/langchain/vectorstores/hologres.py @@ -5,8 +5,8 @@ import uuid from typing import Any, Dict, Iterable, List, Optional, Tuple, Type -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/vectorstores/lancedb.py b/libs/langchain/langchain/vectorstores/lancedb.py index 4e795ef7df143..57b5a1186024f 100644 --- a/libs/langchain/langchain/vectorstores/lancedb.py +++ b/libs/langchain/langchain/vectorstores/lancedb.py @@ -3,8 +3,8 @@ import uuid from typing import Any, Iterable, List, Optional -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/llm_rails.py b/libs/langchain/langchain/vectorstores/llm_rails.py index 50e25f922f0bd..5c3eeb5be2097 100644 --- a/libs/langchain/langchain/vectorstores/llm_rails.py +++ b/libs/langchain/langchain/vectorstores/llm_rails.py @@ -8,9 +8,9 @@ from typing import Any, Iterable, List, Optional, Tuple import requests +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import Field -from langchain_core.schema import Document -from langchain_core.schema.embeddings import Embeddings from langchain.vectorstores.base import VectorStore, VectorStoreRetriever diff --git a/libs/langchain/langchain/vectorstores/marqo.py b/libs/langchain/langchain/vectorstores/marqo.py index 1d0e9d288c307..2e465513713cf 100644 --- a/libs/langchain/langchain/vectorstores/marqo.py +++ b/libs/langchain/langchain/vectorstores/marqo.py @@ -15,8 +15,8 @@ Union, ) -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/matching_engine.py b/libs/langchain/langchain/vectorstores/matching_engine.py index 9b5d8b42704e4..161e41bd5ec5a 100644 --- a/libs/langchain/langchain/vectorstores/matching_engine.py +++ b/libs/langchain/langchain/vectorstores/matching_engine.py @@ -6,9 +6,9 @@ import uuid from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Tuple, 
Type -from langchain_core.schema.document import Document -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.utilities.vertexai import get_client_info diff --git a/libs/langchain/langchain/vectorstores/meilisearch.py b/libs/langchain/langchain/vectorstores/meilisearch.py index 80f24241522f3..9924a6ecd0d4d 100644 --- a/libs/langchain/langchain/vectorstores/meilisearch.py +++ b/libs/langchain/langchain/vectorstores/meilisearch.py @@ -3,8 +3,8 @@ import uuid from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Tuple, Type -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.utils import get_from_env diff --git a/libs/langchain/langchain/vectorstores/milvus.py b/libs/langchain/langchain/vectorstores/milvus.py index 835568483410a..a25b140d685b5 100644 --- a/libs/langchain/langchain/vectorstores/milvus.py +++ b/libs/langchain/langchain/vectorstores/milvus.py @@ -5,8 +5,8 @@ from uuid import uuid4 import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/momento_vector_index.py b/libs/langchain/langchain/vectorstores/momento_vector_index.py index db30d9251c713..dfcb09f713f06 100644 --- a/libs/langchain/langchain/vectorstores/momento_vector_index.py +++ b/libs/langchain/langchain/vectorstores/momento_vector_index.py @@ -11,8 +11,8 @@ ) from uuid import uuid4 -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.utils import get_from_env diff --git a/libs/langchain/langchain/vectorstores/mongodb_atlas.py b/libs/langchain/langchain/vectorstores/mongodb_atlas.py index 872fa7e5757f3..146c8d5cf5518 100644 --- a/libs/langchain/langchain/vectorstores/mongodb_atlas.py +++ b/libs/langchain/langchain/vectorstores/mongodb_atlas.py @@ -15,8 +15,8 @@ ) import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/myscale.py b/libs/langchain/langchain/vectorstores/myscale.py index c57ca4c2a5c4e..4b55f4aa15740 100644 --- a/libs/langchain/langchain/vectorstores/myscale.py +++ b/libs/langchain/langchain/vectorstores/myscale.py @@ -6,9 +6,9 @@ from threading import Thread from typing import Any, Dict, Iterable, List, Optional, Tuple +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseSettings -from 
langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/neo4j_vector.py b/libs/langchain/langchain/vectorstores/neo4j_vector.py index 595656083cdbe..3856387295112 100644 --- a/libs/langchain/langchain/vectorstores/neo4j_vector.py +++ b/libs/langchain/langchain/vectorstores/neo4j_vector.py @@ -15,8 +15,8 @@ Type, ) -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.utils import get_from_env diff --git a/libs/langchain/langchain/vectorstores/nucliadb.py b/libs/langchain/langchain/vectorstores/nucliadb.py index 0a649c7114a2f..d20e51bbc7cbc 100644 --- a/libs/langchain/langchain/vectorstores/nucliadb.py +++ b/libs/langchain/langchain/vectorstores/nucliadb.py @@ -1,9 +1,9 @@ import os from typing import Any, Dict, Iterable, List, Optional, Type -from langchain_core.schema.document import Document -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VST, VectorStore +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VST, VectorStore FIELD_TYPES = { "f": "files", diff --git a/libs/langchain/langchain/vectorstores/opensearch_vector_search.py b/libs/langchain/langchain/vectorstores/opensearch_vector_search.py index b2cc55c7e8ce6..81d58f8a8a249 100644 --- a/libs/langchain/langchain/vectorstores/opensearch_vector_search.py +++ b/libs/langchain/langchain/vectorstores/opensearch_vector_search.py @@ -5,9 +5,9 @@ from typing import Any, Dict, Iterable, List, Optional, Tuple import numpy as np -from langchain_core.schema import Document -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.utils import get_from_dict_or_env from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/pgembedding.py b/libs/langchain/langchain/vectorstores/pgembedding.py index db2f67b927a14..820731503984c 100644 --- a/libs/langchain/langchain/vectorstores/pgembedding.py +++ b/libs/langchain/langchain/vectorstores/pgembedding.py @@ -14,8 +14,8 @@ except ImportError: from sqlalchemy.ext.declarative import declarative_base -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/vectorstores/pgvecto_rs.py b/libs/langchain/langchain/vectorstores/pgvecto_rs.py index ff3c01477cf2a..9efdef27ca9d2 100644 --- a/libs/langchain/langchain/vectorstores/pgvecto_rs.py +++ b/libs/langchain/langchain/vectorstores/pgvecto_rs.py @@ -5,9 +5,9 @@ import numpy as np import sqlalchemy -from langchain_core.schema import Document -from langchain_core.schema.embeddings import Embeddings -from 
langchain_core.schema.vectorstore import VectorStore +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from sqlalchemy import insert, select from sqlalchemy.dialects import postgresql from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column diff --git a/libs/langchain/langchain/vectorstores/pgvector.py b/libs/langchain/langchain/vectorstores/pgvector.py index db8122e0f6763..08497e0d3e818 100644 --- a/libs/langchain/langchain/vectorstores/pgvector.py +++ b/libs/langchain/langchain/vectorstores/pgvector.py @@ -30,8 +30,8 @@ except ImportError: from sqlalchemy.ext.declarative import declarative_base -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/vectorstores/pinecone.py b/libs/langchain/langchain/vectorstores/pinecone.py index e7337cf726ec9..9d0df1e7e9984 100644 --- a/libs/langchain/langchain/vectorstores/pinecone.py +++ b/libs/langchain/langchain/vectorstores/pinecone.py @@ -6,9 +6,9 @@ from typing import TYPE_CHECKING, Any, Callable, Iterable, List, Optional, Tuple, Union import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings from langchain_core.utils.iter import batch_iterate +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import DistanceStrategy, maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/qdrant.py b/libs/langchain/langchain/vectorstores/qdrant.py index 97f0e340c9fcd..acd0e06b6ee30 100644 --- a/libs/langchain/langchain/vectorstores/qdrant.py +++ b/libs/langchain/langchain/vectorstores/qdrant.py @@ -23,8 +23,8 @@ ) import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/redis/base.py b/libs/langchain/langchain/vectorstores/redis/base.py index 13156ccc1387f..4f2e8cb04bf1b 100644 --- a/libs/langchain/langchain/vectorstores/redis/base.py +++ b/libs/langchain/langchain/vectorstores/redis/base.py @@ -23,8 +23,8 @@ import numpy as np import yaml from langchain_core._api import deprecated -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore, VectorStoreRetriever +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore, VectorStoreRetriever from langchain.callbacks.manager import CallbackManagerForRetrieverRun from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/rocksetdb.py b/libs/langchain/langchain/vectorstores/rocksetdb.py index cae5b6de2d61f..9b6610c3a7dc0 100644 --- a/libs/langchain/langchain/vectorstores/rocksetdb.py +++ b/libs/langchain/langchain/vectorstores/rocksetdb.py @@ -4,8 +4,8 @@ from enum import Enum from typing import Any, 
Iterable, List, Optional, Tuple -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/scann.py b/libs/langchain/langchain/vectorstores/scann.py index 999b1c992bf13..4542be1031638 100644 --- a/libs/langchain/langchain/vectorstores/scann.py +++ b/libs/langchain/langchain/vectorstores/scann.py @@ -7,8 +7,8 @@ from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.base import AddableMixin, Docstore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/semadb.py b/libs/langchain/langchain/vectorstores/semadb.py index c7aeab3150cd5..8f8e26a58320c 100644 --- a/libs/langchain/langchain/vectorstores/semadb.py +++ b/libs/langchain/langchain/vectorstores/semadb.py @@ -3,9 +3,9 @@ import numpy as np import requests -from langchain_core.schema.document import Document -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.utils import get_from_env from langchain.vectorstores.utils import DistanceStrategy diff --git a/libs/langchain/langchain/vectorstores/singlestoredb.py b/libs/langchain/langchain/vectorstores/singlestoredb.py index 27f0653303165..9a96b6fdfa6dc 100644 --- a/libs/langchain/langchain/vectorstores/singlestoredb.py +++ b/libs/langchain/langchain/vectorstores/singlestoredb.py @@ -12,8 +12,8 @@ Type, ) -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore, VectorStoreRetriever +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore, VectorStoreRetriever from sqlalchemy.pool import QueuePool from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/sklearn.py b/libs/langchain/langchain/vectorstores/sklearn.py index 34aea3ff5bb41..f7aef920abbc5 100644 --- a/libs/langchain/langchain/vectorstores/sklearn.py +++ b/libs/langchain/langchain/vectorstores/sklearn.py @@ -10,9 +10,9 @@ from typing import Any, Dict, Iterable, List, Literal, Optional, Tuple, Type from uuid import uuid4 -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings from langchain_core.utils import guard_import +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/sqlitevss.py b/libs/langchain/langchain/vectorstores/sqlitevss.py index 0216c279d4e0c..2ee5e3d9b1dfc 100644 --- a/libs/langchain/langchain/vectorstores/sqlitevss.py +++ b/libs/langchain/langchain/vectorstores/sqlitevss.py @@ -13,8 +13,8 @@ Type, ) -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import 
VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/starrocks.py b/libs/langchain/langchain/vectorstores/starrocks.py index 4bd64e4f457b0..31d51779db053 100644 --- a/libs/langchain/langchain/vectorstores/starrocks.py +++ b/libs/langchain/langchain/vectorstores/starrocks.py @@ -6,9 +6,9 @@ from threading import Thread from typing import Any, Dict, Iterable, List, Optional, Tuple +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import BaseSettings -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/supabase.py b/libs/langchain/langchain/vectorstores/supabase.py index d37880eac0169..682b8bf4b7410 100644 --- a/libs/langchain/langchain/vectorstores/supabase.py +++ b/libs/langchain/langchain/vectorstores/supabase.py @@ -15,8 +15,8 @@ ) import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance @@ -46,7 +46,7 @@ class SupabaseVectorStore(VectorStore): .. code-block:: python from langchain.embeddings.openai import OpenAIEmbeddings - from langchain_core.schema import Document + from langchain_core.documents import Document from langchain.vectorstores import SupabaseVectorStore from supabase.client import create_client diff --git a/libs/langchain/langchain/vectorstores/tair.py b/libs/langchain/langchain/vectorstores/tair.py index 75a86ec8e6c13..a4dd92016d099 100644 --- a/libs/langchain/langchain/vectorstores/tair.py +++ b/libs/langchain/langchain/vectorstores/tair.py @@ -5,8 +5,8 @@ import uuid from typing import Any, Iterable, List, Optional, Type -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.utils import get_from_dict_or_env diff --git a/libs/langchain/langchain/vectorstores/tencentvectordb.py b/libs/langchain/langchain/vectorstores/tencentvectordb.py index cc5cc94716983..c17ab9351c2b2 100644 --- a/libs/langchain/langchain/vectorstores/tencentvectordb.py +++ b/libs/langchain/langchain/vectorstores/tencentvectordb.py @@ -7,9 +7,9 @@ from typing import Any, Dict, Iterable, List, Optional, Tuple import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings from langchain_core.utils import guard_import +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/tigris.py b/libs/langchain/langchain/vectorstores/tigris.py index e168909c07c12..96038b7c74940 100644 --- a/libs/langchain/langchain/vectorstores/tigris.py +++ b/libs/langchain/langchain/vectorstores/tigris.py @@ -3,9 +3,9 @@ import 
itertools from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Tuple -from langchain_core.schema import Document -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore if TYPE_CHECKING: from tigrisdb import TigrisClient diff --git a/libs/langchain/langchain/vectorstores/tiledb.py b/libs/langchain/langchain/vectorstores/tiledb.py index 8b144265b3e29..f2ba02034d67a 100644 --- a/libs/langchain/langchain/vectorstores/tiledb.py +++ b/libs/langchain/langchain/vectorstores/tiledb.py @@ -7,8 +7,8 @@ from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/timescalevector.py b/libs/langchain/langchain/vectorstores/timescalevector.py index b7fcbe77ca8fd..d755d6694a900 100644 --- a/libs/langchain/langchain/vectorstores/timescalevector.py +++ b/libs/langchain/langchain/vectorstores/timescalevector.py @@ -18,9 +18,9 @@ Union, ) -from langchain_core.schema.document import Document -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.utils import get_from_dict_or_env from langchain.vectorstores.utils import DistanceStrategy diff --git a/libs/langchain/langchain/vectorstores/typesense.py b/libs/langchain/langchain/vectorstores/typesense.py index bb37192670f16..f8f5a687c7e38 100644 --- a/libs/langchain/langchain/vectorstores/typesense.py +++ b/libs/langchain/langchain/vectorstores/typesense.py @@ -3,8 +3,8 @@ import uuid from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Tuple, Union -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.utils import get_from_env diff --git a/libs/langchain/langchain/vectorstores/usearch.py b/libs/langchain/langchain/vectorstores/usearch.py index 4c5a23533d5a2..463e32a7463ee 100644 --- a/libs/langchain/langchain/vectorstores/usearch.py +++ b/libs/langchain/langchain/vectorstores/usearch.py @@ -3,8 +3,8 @@ from typing import Any, Dict, Iterable, List, Optional, Tuple import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.base import AddableMixin, Docstore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/vald.py b/libs/langchain/langchain/vectorstores/vald.py index d7ca76a004e40..e0d4ebc1a3ccf 100644 --- a/libs/langchain/langchain/vectorstores/vald.py +++ b/libs/langchain/langchain/vectorstores/vald.py @@ -4,8 +4,8 @@ from typing 
import Any, Iterable, List, Optional, Tuple, Type import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/vearch.py b/libs/langchain/langchain/vectorstores/vearch.py index 67b2a1e84d815..bec007fcdc7b0 100644 --- a/libs/langchain/langchain/vectorstores/vearch.py +++ b/libs/langchain/langchain/vectorstores/vearch.py @@ -6,8 +6,8 @@ from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Tuple, Type import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/vectara.py b/libs/langchain/langchain/vectorstores/vectara.py index 85f92d9fe6a2c..73c1b998c2478 100644 --- a/libs/langchain/langchain/vectorstores/vectara.py +++ b/libs/langchain/langchain/vectorstores/vectara.py @@ -7,10 +7,10 @@ from typing import Any, Iterable, List, Optional, Tuple, Type import requests +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings from langchain_core.pydantic_v1 import Field -from langchain_core.schema import Document -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore, VectorStoreRetriever +from langchain_core.vectorstores import VectorStore, VectorStoreRetriever logger = logging.getLogger(__name__) diff --git a/libs/langchain/langchain/vectorstores/vespa.py b/libs/langchain/langchain/vectorstores/vespa.py index 31e6cf5c60e57..28f9ee1b4b7ba 100644 --- a/libs/langchain/langchain/vectorstores/vespa.py +++ b/libs/langchain/langchain/vectorstores/vespa.py @@ -2,7 +2,7 @@ from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, Union -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings from langchain.docstore.document import Document from langchain.vectorstores.base import VectorStore, VectorStoreRetriever diff --git a/libs/langchain/langchain/vectorstores/weaviate.py b/libs/langchain/langchain/vectorstores/weaviate.py index 9d33dd5df4cd9..3ef7b28fb59e7 100644 --- a/libs/langchain/langchain/vectorstores/weaviate.py +++ b/libs/langchain/langchain/vectorstores/weaviate.py @@ -15,8 +15,8 @@ from uuid import uuid4 import numpy as np -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document from langchain.vectorstores.utils import maximal_marginal_relevance diff --git a/libs/langchain/langchain/vectorstores/xata.py b/libs/langchain/langchain/vectorstores/xata.py index eec1b22dec303..e81bfe6b150ae 100644 --- a/libs/langchain/langchain/vectorstores/xata.py +++ b/libs/langchain/langchain/vectorstores/xata.py @@ -4,8 +4,8 @@ from itertools import repeat from typing import Any, Dict, Iterable, List, Optional, Tuple, Type -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore 
import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/zep.py b/libs/langchain/langchain/vectorstores/zep.py index 395c6670b1e7c..45343977549ab 100644 --- a/libs/langchain/langchain/vectorstores/zep.py +++ b/libs/langchain/langchain/vectorstores/zep.py @@ -5,8 +5,8 @@ from dataclasses import asdict, dataclass from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Tuple -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.docstore.document import Document diff --git a/libs/langchain/langchain/vectorstores/zilliz.py b/libs/langchain/langchain/vectorstores/zilliz.py index 7d45cf8843621..2ca6e2b3ffaed 100644 --- a/libs/langchain/langchain/vectorstores/zilliz.py +++ b/libs/langchain/langchain/vectorstores/zilliz.py @@ -3,7 +3,7 @@ import logging from typing import Any, Dict, List, Optional -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings from langchain.vectorstores.milvus import Milvus diff --git a/libs/langchain/tests/integration_tests/cache/test_cassandra.py b/libs/langchain/tests/integration_tests/cache/test_cassandra.py index 60700c732900a..3dc186ed8d599 100644 --- a/libs/langchain/tests/integration_tests/cache/test_cassandra.py +++ b/libs/langchain/tests/integration_tests/cache/test_cassandra.py @@ -4,7 +4,7 @@ from typing import Any, Iterator, Tuple import pytest -from langchain_core.schema import Generation, LLMResult +from langchain_core.outputs import Generation, LLMResult from langchain.cache import CassandraCache, CassandraSemanticCache from langchain.globals import get_llm_cache, set_llm_cache diff --git a/libs/langchain/tests/integration_tests/cache/test_gptcache.py b/libs/langchain/tests/integration_tests/cache/test_gptcache.py index 12d0430807c4d..7e1e0c6957d10 100644 --- a/libs/langchain/tests/integration_tests/cache/test_gptcache.py +++ b/libs/langchain/tests/integration_tests/cache/test_gptcache.py @@ -2,7 +2,7 @@ from typing import Any, Callable, Union import pytest -from langchain_core.schema import Generation +from langchain_core.outputs import Generation from langchain.cache import GPTCache from langchain.globals import get_llm_cache, set_llm_cache diff --git a/libs/langchain/tests/integration_tests/cache/test_momento_cache.py b/libs/langchain/tests/integration_tests/cache/test_momento_cache.py index ca551b6254818..f5ef26ba663a5 100644 --- a/libs/langchain/tests/integration_tests/cache/test_momento_cache.py +++ b/libs/langchain/tests/integration_tests/cache/test_momento_cache.py @@ -11,7 +11,7 @@ from typing import Iterator import pytest -from langchain_core.schema import Generation, LLMResult +from langchain_core.outputs import Generation, LLMResult from langchain.cache import MomentoCache from langchain.globals import set_llm_cache diff --git a/libs/langchain/tests/integration_tests/cache/test_redis_cache.py b/libs/langchain/tests/integration_tests/cache/test_redis_cache.py index a02670d3b6fad..26ad630c5aa71 100644 --- a/libs/langchain/tests/integration_tests/cache/test_redis_cache.py +++ b/libs/langchain/tests/integration_tests/cache/test_redis_cache.py @@ -3,11 +3,10 @@ from typing import List, cast import pytest +from langchain_core.embeddings import 
Embeddings from langchain_core.load.dump import dumps -from langchain_core.schema import Generation, LLMResult -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.messages import AIMessage, BaseMessage, HumanMessage -from langchain_core.schema.output import ChatGeneration +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.outputs import ChatGeneration, Generation, LLMResult from langchain.cache import RedisCache, RedisSemanticCache from langchain.globals import get_llm_cache, set_llm_cache diff --git a/libs/langchain/tests/integration_tests/cache/test_upstash_redis_cache.py b/libs/langchain/tests/integration_tests/cache/test_upstash_redis_cache.py index 1ed5ba98b2ada..6cd81eb066204 100644 --- a/libs/langchain/tests/integration_tests/cache/test_upstash_redis_cache.py +++ b/libs/langchain/tests/integration_tests/cache/test_upstash_redis_cache.py @@ -2,7 +2,7 @@ import uuid import pytest -from langchain_core.schema import Generation, LLMResult +from langchain_core.outputs import Generation, LLMResult import langchain from langchain.cache import UpstashRedisCache diff --git a/libs/langchain/tests/integration_tests/chat_models/test_anthropic.py b/libs/langchain/tests/integration_tests/chat_models/test_anthropic.py index ef632e8b1959e..cce21e39238a6 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_anthropic.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_anthropic.py @@ -2,8 +2,8 @@ from typing import List import pytest -from langchain_core.schema import ChatGeneration, LLMResult -from langchain_core.schema.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.outputs import ChatGeneration, LLMResult from langchain.callbacks.manager import CallbackManager from langchain.chat_models.anthropic import ( diff --git a/libs/langchain/tests/integration_tests/chat_models/test_azure_openai.py b/libs/langchain/tests/integration_tests/chat_models/test_azure_openai.py index 6af1d4606c100..ef911dc00c824 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_azure_openai.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_azure_openai.py @@ -3,12 +3,8 @@ from typing import Any import pytest -from langchain_core.schema import ( - ChatGeneration, - ChatResult, - LLMResult, -) -from langchain_core.schema.messages import BaseMessage, HumanMessage +from langchain_core.messages import BaseMessage, HumanMessage +from langchain_core.outputs import ChatGeneration, ChatResult, LLMResult from langchain.callbacks.manager import CallbackManager from langchain.chat_models import AzureChatOpenAI diff --git a/libs/langchain/tests/integration_tests/chat_models/test_azureml_endpoint.py b/libs/langchain/tests/integration_tests/chat_models/test_azureml_endpoint.py index 8050eb3dc0d7a..929074866bd6f 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_azureml_endpoint.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_azureml_endpoint.py @@ -1,12 +1,7 @@ """Test AzureML Chat Endpoint wrapper.""" -from langchain_core.schema import ( - AIMessage, - BaseMessage, - ChatGeneration, - HumanMessage, - LLMResult, -) +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.outputs import ChatGeneration, LLMResult from langchain.chat_models.azureml_endpoint import ( AzureMLChatOnlineEndpoint, diff --git 
a/libs/langchain/tests/integration_tests/chat_models/test_baichuan.py b/libs/langchain/tests/integration_tests/chat_models/test_baichuan.py index 0dfbd1dabc986..d4689641155a5 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_baichuan.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_baichuan.py @@ -1,4 +1,4 @@ -from langchain_core.schema.messages import AIMessage, HumanMessage +from langchain_core.messages import AIMessage, HumanMessage from langchain.chat_models.baichuan import ChatBaichuan diff --git a/libs/langchain/tests/integration_tests/chat_models/test_bedrock.py b/libs/langchain/tests/integration_tests/chat_models/test_bedrock.py index 2b53ceaa33c8a..1750f53f73064 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_bedrock.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_bedrock.py @@ -2,8 +2,8 @@ from typing import Any import pytest -from langchain_core.schema import ChatGeneration, LLMResult -from langchain_core.schema.messages import BaseMessage, HumanMessage, SystemMessage +from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage +from langchain_core.outputs import ChatGeneration, LLMResult from langchain.callbacks.manager import CallbackManager from langchain.chat_models import BedrockChat diff --git a/libs/langchain/tests/integration_tests/chat_models/test_ernie.py b/libs/langchain/tests/integration_tests/chat_models/test_ernie.py index 214f55922e028..6db6321d1cf9a 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_ernie.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_ernie.py @@ -1,5 +1,5 @@ import pytest -from langchain_core.schema.messages import AIMessage, HumanMessage +from langchain_core.messages import AIMessage, HumanMessage from langchain.chat_models.ernie import ErnieBotChat diff --git a/libs/langchain/tests/integration_tests/chat_models/test_fireworks.py b/libs/langchain/tests/integration_tests/chat_models/test_fireworks.py index 55b11499584d1..fdffcddd0186d 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_fireworks.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_fireworks.py @@ -3,8 +3,8 @@ from typing import cast import pytest -from langchain_core.schema import ChatGeneration, ChatResult, LLMResult -from langchain_core.schema.messages import BaseMessage, HumanMessage, SystemMessage +from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage +from langchain_core.outputs import ChatGeneration, ChatResult, LLMResult from langchain.chat_models.fireworks import ChatFireworks diff --git a/libs/langchain/tests/integration_tests/chat_models/test_google_palm.py b/libs/langchain/tests/integration_tests/chat_models/test_google_palm.py index 25a0151b35532..3e2ae6eb046d2 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_google_palm.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_google_palm.py @@ -4,12 +4,8 @@ valid API key. 
""" -from langchain_core.schema import ( - ChatGeneration, - ChatResult, - LLMResult, -) -from langchain_core.schema.messages import BaseMessage, HumanMessage, SystemMessage +from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage +from langchain_core.outputs import ChatGeneration, ChatResult, LLMResult from langchain.chat_models import ChatGooglePalm diff --git a/libs/langchain/tests/integration_tests/chat_models/test_hunyuan.py b/libs/langchain/tests/integration_tests/chat_models/test_hunyuan.py index 59b0cc6362fd5..47b60864acf80 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_hunyuan.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_hunyuan.py @@ -1,4 +1,4 @@ -from langchain_core.schema.messages import AIMessage, HumanMessage +from langchain_core.messages import AIMessage, HumanMessage from langchain.chat_models.hunyuan import ChatHunyuan diff --git a/libs/langchain/tests/integration_tests/chat_models/test_jinachat.py b/libs/langchain/tests/integration_tests/chat_models/test_jinachat.py index f2d9c7cae57ec..8b7cdc129c1ab 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_jinachat.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_jinachat.py @@ -2,13 +2,8 @@ import pytest -from langchain_core.schema import ( - BaseMessage, - ChatGeneration, - HumanMessage, - LLMResult, - SystemMessage, -) +from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage +from langchain_core.outputs import ChatGeneration, LLMResult from langchain.callbacks.manager import CallbackManager from langchain.chat_models.jinachat import JinaChat diff --git a/libs/langchain/tests/integration_tests/chat_models/test_konko.py b/libs/langchain/tests/integration_tests/chat_models/test_konko.py index ff49751d97017..56f479b302610 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_konko.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_konko.py @@ -2,12 +2,8 @@ from typing import Any import pytest -from langchain_core.schema import ( - ChatGeneration, - ChatResult, - LLMResult, -) -from langchain_core.schema.messages import BaseMessage, HumanMessage, SystemMessage +from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage +from langchain_core.outputs import ChatGeneration, ChatResult, LLMResult from langchain.callbacks.manager import CallbackManager from langchain.chat_models.konko import ChatKonko diff --git a/libs/langchain/tests/integration_tests/chat_models/test_litellm.py b/libs/langchain/tests/integration_tests/chat_models/test_litellm.py index ba380a90b810a..571a287daafc5 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_litellm.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_litellm.py @@ -1,11 +1,8 @@ """Test Anthropic API wrapper.""" from typing import List -from langchain_core.schema import ( - ChatGeneration, - LLMResult, -) -from langchain_core.schema.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.outputs import ChatGeneration, LLMResult from langchain.callbacks.manager import ( CallbackManager, diff --git a/libs/langchain/tests/integration_tests/chat_models/test_openai.py b/libs/langchain/tests/integration_tests/chat_models/test_openai.py index a036c717c03e9..55977937f30f1 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_openai.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_openai.py 
@@ -2,15 +2,16 @@ from typing import Any, List, Optional, Union import pytest -from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate -from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema import ( +from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage +from langchain_core.outputs import ( ChatGeneration, + ChatGenerationChunk, ChatResult, + GenerationChunk, LLMResult, ) -from langchain_core.schema.messages import BaseMessage, HumanMessage, SystemMessage -from langchain_core.schema.output import ChatGenerationChunk, GenerationChunk +from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate +from langchain_core.pydantic_v1 import BaseModel, Field from langchain.callbacks.base import AsyncCallbackHandler from langchain.callbacks.manager import CallbackManager diff --git a/libs/langchain/tests/integration_tests/chat_models/test_pai_eas_chat_endpoint.py b/libs/langchain/tests/integration_tests/chat_models/test_pai_eas_chat_endpoint.py index a2f519ce88a9b..0095a5e4a2898 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_pai_eas_chat_endpoint.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_pai_eas_chat_endpoint.py @@ -1,13 +1,8 @@ """Test AliCloud Pai Eas Chat Model.""" import os -from langchain_core.schema import ( - AIMessage, - BaseMessage, - ChatGeneration, - HumanMessage, - LLMResult, -) +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.outputs import ChatGeneration, LLMResult from langchain.callbacks.manager import CallbackManager from langchain.chat_models.pai_eas_endpoint import PaiEasChatEndpoint diff --git a/libs/langchain/tests/integration_tests/chat_models/test_promptlayer_openai.py b/libs/langchain/tests/integration_tests/chat_models/test_promptlayer_openai.py index f993c6a22ffc2..3622701d08f45 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_promptlayer_openai.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_promptlayer_openai.py @@ -1,12 +1,8 @@ """Test PromptLayerChatOpenAI wrapper.""" import pytest -from langchain_core.schema import ( - ChatGeneration, - ChatResult, - LLMResult, -) -from langchain_core.schema.messages import BaseMessage, HumanMessage, SystemMessage +from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage +from langchain_core.outputs import ChatGeneration, ChatResult, LLMResult from langchain.callbacks.manager import CallbackManager from langchain.chat_models.promptlayer_openai import PromptLayerChatOpenAI diff --git a/libs/langchain/tests/integration_tests/chat_models/test_qianfan_endpoint.py b/libs/langchain/tests/integration_tests/chat_models/test_qianfan_endpoint.py index 8a4547ff6aedf..fe22694f97dfd 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_qianfan_endpoint.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_qianfan_endpoint.py @@ -2,15 +2,14 @@ from typing import Any -from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate -from langchain_core.schema import ( +from langchain_core.messages import ( AIMessage, BaseMessage, - ChatGeneration, FunctionMessage, HumanMessage, - LLMResult, ) +from langchain_core.outputs import ChatGeneration, LLMResult +from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate from langchain.callbacks.manager import CallbackManager from langchain.chains.openai_functions import ( diff --git 
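The hunks above and below apply the migration that runs through this whole diff: message classes now come from langchain_core.messages, generation and result types from langchain_core.outputs, and Document, Embeddings, and VectorStore from langchain_core.documents, langchain_core.embeddings, and langchain_core.vectorstores, replacing the former langchain_core.schema.* paths. A minimal before/after sketch of that pattern, using only names that appear in these hunks:

# Old import locations (removed throughout this diff):
#   from langchain_core.schema import ChatGeneration, Document, LLMResult
#   from langchain_core.schema.messages import AIMessage, BaseMessage, HumanMessage
#   from langchain_core.schema.embeddings import Embeddings
#   from langchain_core.schema.vectorstore import VectorStore

# New import locations (added throughout this diff):
from langchain_core.documents import Document
from langchain_core.embeddings import Embeddings
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.outputs import ChatGeneration, LLMResult
from langchain_core.vectorstores import VectorStore

In these hunks only the import lines change; the code that uses the classes is untouched.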
a/libs/langchain/tests/integration_tests/chat_models/test_tongyi.py b/libs/langchain/tests/integration_tests/chat_models/test_tongyi.py index 42519c248db96..a743cdb16a704 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_tongyi.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_tongyi.py @@ -1,12 +1,7 @@ """Test Alibaba Tongyi Chat Model.""" -from langchain_core.schema import ( - AIMessage, - BaseMessage, - ChatGeneration, - HumanMessage, - LLMResult, -) +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.outputs import ChatGeneration, LLMResult from langchain.callbacks.manager import CallbackManager from langchain.chat_models.tongyi import ChatTongyi diff --git a/libs/langchain/tests/integration_tests/chat_models/test_vertexai.py b/libs/langchain/tests/integration_tests/chat_models/test_vertexai.py index cc91d522677c7..47cd280b8e821 100644 --- a/libs/langchain/tests/integration_tests/chat_models/test_vertexai.py +++ b/libs/langchain/tests/integration_tests/chat_models/test_vertexai.py @@ -11,8 +11,8 @@ from unittest.mock import MagicMock, Mock, patch import pytest -from langchain_core.schema import LLMResult -from langchain_core.schema.messages import AIMessage, HumanMessage, SystemMessage +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage +from langchain_core.outputs import LLMResult from langchain.chat_models import ChatVertexAI from langchain.chat_models.vertexai import _parse_chat_history, _parse_examples diff --git a/libs/langchain/tests/integration_tests/document_loaders/parsers/test_docai.py b/libs/langchain/tests/integration_tests/document_loaders/parsers/test_docai.py index 66c2352277d77..1d576033cd459 100644 --- a/libs/langchain/tests/integration_tests/document_loaders/parsers/test_docai.py +++ b/libs/langchain/tests/integration_tests/document_loaders/parsers/test_docai.py @@ -6,7 +6,7 @@ """ import os -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.blob_loaders import Blob from langchain.document_loaders.parsers import DocAIParser diff --git a/libs/langchain/tests/integration_tests/document_loaders/test_arxiv.py b/libs/langchain/tests/integration_tests/document_loaders/test_arxiv.py index f3f44032835ca..5cbf8957672f6 100644 --- a/libs/langchain/tests/integration_tests/document_loaders/test_arxiv.py +++ b/libs/langchain/tests/integration_tests/document_loaders/test_arxiv.py @@ -1,7 +1,7 @@ from typing import List import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.arxiv import ArxivLoader diff --git a/libs/langchain/tests/integration_tests/document_loaders/test_dataframe.py b/libs/langchain/tests/integration_tests/document_loaders/test_dataframe.py index 74e91bcb05f4e..c1be686995b56 100644 --- a/libs/langchain/tests/integration_tests/document_loaders/test_dataframe.py +++ b/libs/langchain/tests/integration_tests/document_loaders/test_dataframe.py @@ -1,6 +1,6 @@ import pandas as pd import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders import DataFrameLoader diff --git a/libs/langchain/tests/integration_tests/document_loaders/test_geodataframe.py b/libs/langchain/tests/integration_tests/document_loaders/test_geodataframe.py index b4e6d45dca4ff..9417a47844283 100644 --- 
a/libs/langchain/tests/integration_tests/document_loaders/test_geodataframe.py +++ b/libs/langchain/tests/integration_tests/document_loaders/test_geodataframe.py @@ -3,7 +3,7 @@ from typing import TYPE_CHECKING import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders import GeoDataFrameLoader diff --git a/libs/langchain/tests/integration_tests/document_loaders/test_polars_dataframe.py b/libs/langchain/tests/integration_tests/document_loaders/test_polars_dataframe.py index f1743b451da8d..80ca3892115c9 100644 --- a/libs/langchain/tests/integration_tests/document_loaders/test_polars_dataframe.py +++ b/libs/langchain/tests/integration_tests/document_loaders/test_polars_dataframe.py @@ -3,7 +3,7 @@ from typing import TYPE_CHECKING import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders import PolarsDataFrameLoader diff --git a/libs/langchain/tests/integration_tests/document_loaders/test_pubmed.py b/libs/langchain/tests/integration_tests/document_loaders/test_pubmed.py index 9f2da3e934b6d..45a23157f9fca 100644 --- a/libs/langchain/tests/integration_tests/document_loaders/test_pubmed.py +++ b/libs/langchain/tests/integration_tests/document_loaders/test_pubmed.py @@ -2,7 +2,7 @@ from typing import List import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders import PubMedLoader diff --git a/libs/langchain/tests/integration_tests/document_loaders/test_quip.py b/libs/langchain/tests/integration_tests/document_loaders/test_quip.py index 8e059714ce52c..6d4ea44689171 100644 --- a/libs/langchain/tests/integration_tests/document_loaders/test_quip.py +++ b/libs/langchain/tests/integration_tests/document_loaders/test_quip.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock, patch import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.quip import QuipLoader diff --git a/libs/langchain/tests/integration_tests/document_loaders/test_tensorflow_datasets.py b/libs/langchain/tests/integration_tests/document_loaders/test_tensorflow_datasets.py index b69bebd0afe2c..82c3dc8780947 100644 --- a/libs/langchain/tests/integration_tests/document_loaders/test_tensorflow_datasets.py +++ b/libs/langchain/tests/integration_tests/document_loaders/test_tensorflow_datasets.py @@ -4,8 +4,8 @@ from typing import TYPE_CHECKING import pytest +from langchain_core.documents import Document from langchain_core.pydantic_v1 import ValidationError -from langchain_core.schema.document import Document from langchain.document_loaders.tensorflow_datasets import TensorflowDatasetLoader diff --git a/libs/langchain/tests/integration_tests/document_loaders/test_xorbits.py b/libs/langchain/tests/integration_tests/document_loaders/test_xorbits.py index dce596cb97d09..9f6407e10626b 100644 --- a/libs/langchain/tests/integration_tests/document_loaders/test_xorbits.py +++ b/libs/langchain/tests/integration_tests/document_loaders/test_xorbits.py @@ -1,5 +1,5 @@ import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders import XorbitsLoader diff --git a/libs/langchain/tests/integration_tests/llms/test_anthropic.py b/libs/langchain/tests/integration_tests/llms/test_anthropic.py index 95c82b900a318..042b607877b58 100644 --- 
a/libs/langchain/tests/integration_tests/llms/test_anthropic.py +++ b/libs/langchain/tests/integration_tests/llms/test_anthropic.py @@ -2,7 +2,7 @@ from typing import Generator import pytest -from langchain_core.schema import LLMResult +from langchain_core.outputs import LLMResult from langchain.callbacks.manager import CallbackManager from langchain.llms.anthropic import Anthropic diff --git a/libs/langchain/tests/integration_tests/llms/test_azure_openai.py b/libs/langchain/tests/integration_tests/llms/test_azure_openai.py index 6aac97e483b84..593d3a20b79ce 100644 --- a/libs/langchain/tests/integration_tests/llms/test_azure_openai.py +++ b/libs/langchain/tests/integration_tests/llms/test_azure_openai.py @@ -3,9 +3,7 @@ from typing import Any, Generator import pytest -from langchain_core.schema import ( - LLMResult, -) +from langchain_core.outputs import LLMResult from langchain.callbacks.manager import CallbackManager from langchain.llms import AzureOpenAI diff --git a/libs/langchain/tests/integration_tests/llms/test_chatglm.py b/libs/langchain/tests/integration_tests/llms/test_chatglm.py index fca5ca34ebcb6..3a15b9f2e6891 100644 --- a/libs/langchain/tests/integration_tests/llms/test_chatglm.py +++ b/libs/langchain/tests/integration_tests/llms/test_chatglm.py @@ -1,5 +1,5 @@ """Test ChatGLM API wrapper.""" -from langchain_core.schema import LLMResult +from langchain_core.outputs import LLMResult from langchain.llms.chatglm import ChatGLM diff --git a/libs/langchain/tests/integration_tests/llms/test_fireworks.py b/libs/langchain/tests/integration_tests/llms/test_fireworks.py index 81bcfecfcb012..bf34e0cf950f2 100644 --- a/libs/langchain/tests/integration_tests/llms/test_fireworks.py +++ b/libs/langchain/tests/integration_tests/llms/test_fireworks.py @@ -3,12 +3,12 @@ from typing import Generator import pytest +from langchain_core.outputs import LLMResult from langchain_core.prompts import PromptTemplate from langchain_core.prompts.chat import ( ChatPromptTemplate, HumanMessagePromptTemplate, ) -from langchain_core.schema import LLMResult from langchain.chains import LLMChain from langchain.llms.fireworks import Fireworks diff --git a/libs/langchain/tests/integration_tests/llms/test_opaqueprompts.py b/libs/langchain/tests/integration_tests/llms/test_opaqueprompts.py index 9fd257780931d..e6418f426e70e 100644 --- a/libs/langchain/tests/integration_tests/llms/test_opaqueprompts.py +++ b/libs/langchain/tests/integration_tests/llms/test_opaqueprompts.py @@ -1,6 +1,6 @@ +from langchain_core.output_parsers import StrOutputParser from langchain_core.prompts import PromptTemplate from langchain_core.runnables import RunnableParallel -from langchain_core.schema.output_parser import StrOutputParser import langchain.utilities.opaqueprompts as op from langchain.chains.llm import LLMChain diff --git a/libs/langchain/tests/integration_tests/llms/test_openai.py b/libs/langchain/tests/integration_tests/llms/test_openai.py index 588ea601cefdd..84de53a8d6100 100644 --- a/libs/langchain/tests/integration_tests/llms/test_openai.py +++ b/libs/langchain/tests/integration_tests/llms/test_openai.py @@ -3,7 +3,7 @@ from typing import Generator import pytest -from langchain_core.schema import LLMResult +from langchain_core.outputs import LLMResult from langchain.callbacks.manager import CallbackManager from langchain.chat_models.openai import ChatOpenAI diff --git a/libs/langchain/tests/integration_tests/llms/test_qianfan_endpoint.py b/libs/langchain/tests/integration_tests/llms/test_qianfan_endpoint.py index 
e46a6342a0005..621e1ad2f567a 100644 --- a/libs/langchain/tests/integration_tests/llms/test_qianfan_endpoint.py +++ b/libs/langchain/tests/integration_tests/llms/test_qianfan_endpoint.py @@ -1,7 +1,7 @@ """Test Baidu Qianfan LLM Endpoint.""" from typing import Generator -from langchain_core.schema import LLMResult +from langchain_core.outputs import LLMResult from langchain.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint diff --git a/libs/langchain/tests/integration_tests/llms/test_tongyi.py b/libs/langchain/tests/integration_tests/llms/test_tongyi.py index 704d994a524a0..83ef33d98fcf2 100644 --- a/libs/langchain/tests/integration_tests/llms/test_tongyi.py +++ b/libs/langchain/tests/integration_tests/llms/test_tongyi.py @@ -1,5 +1,5 @@ """Test Tongyi API wrapper.""" -from langchain_core.schema import LLMResult +from langchain_core.outputs import LLMResult from langchain.llms.tongyi import Tongyi diff --git a/libs/langchain/tests/integration_tests/llms/test_vertexai.py b/libs/langchain/tests/integration_tests/llms/test_vertexai.py index a89d299c0aed8..ef9c8fb1b5386 100644 --- a/libs/langchain/tests/integration_tests/llms/test_vertexai.py +++ b/libs/langchain/tests/integration_tests/llms/test_vertexai.py @@ -10,7 +10,7 @@ import os import pytest -from langchain_core.schema import LLMResult +from langchain_core.outputs import LLMResult from pytest_mock import MockerFixture from langchain.chains.summarize import load_summarize_chain diff --git a/libs/langchain/tests/integration_tests/memory/chat_message_histories/test_zep.py b/libs/langchain/tests/integration_tests/memory/chat_message_histories/test_zep.py index 157dd7bc3d7b0..fa8e47b1565ab 100644 --- a/libs/langchain/tests/integration_tests/memory/chat_message_histories/test_zep.py +++ b/libs/langchain/tests/integration_tests/memory/chat_message_histories/test_zep.py @@ -1,7 +1,7 @@ from typing import TYPE_CHECKING import pytest -from langchain_core.schema.messages import AIMessage, HumanMessage, SystemMessage +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage from pytest_mock import MockerFixture from langchain.memory.chat_message_histories import ZepChatMessageHistory diff --git a/libs/langchain/tests/integration_tests/memory/test_cassandra.py b/libs/langchain/tests/integration_tests/memory/test_cassandra.py index d10e3ee5a36e9..ec79f102bf850 100644 --- a/libs/langchain/tests/integration_tests/memory/test_cassandra.py +++ b/libs/langchain/tests/integration_tests/memory/test_cassandra.py @@ -3,7 +3,7 @@ from typing import Optional from cassandra.cluster import Cluster -from langchain_core.schema.messages import AIMessage, HumanMessage +from langchain_core.messages import AIMessage, HumanMessage from langchain.memory import ConversationBufferMemory from langchain.memory.chat_message_histories.cassandra import ( diff --git a/libs/langchain/tests/integration_tests/memory/test_cosmos_db.py b/libs/langchain/tests/integration_tests/memory/test_cosmos_db.py index ea927c97f36c3..ed119353afc0b 100644 --- a/libs/langchain/tests/integration_tests/memory/test_cosmos_db.py +++ b/libs/langchain/tests/integration_tests/memory/test_cosmos_db.py @@ -1,7 +1,7 @@ import json import os -from langchain_core.schema.messages import _message_to_dict +from langchain_core.messages import message_to_dict from langchain.memory import ConversationBufferMemory from langchain.memory.chat_message_histories import CosmosDBChatMessageHistory @@ -34,7 +34,7 @@ def test_memory_with_message_store() -> None: # get the message history from the memory 
store and turn it into a json messages = memory.chat_memory.messages - messages_json = json.dumps([_message_to_dict(msg) for msg in messages]) + messages_json = json.dumps([message_to_dict(msg) for msg in messages]) assert "This is me, the AI" in messages_json assert "This is me, the human" in messages_json diff --git a/libs/langchain/tests/integration_tests/memory/test_elasticsearch.py b/libs/langchain/tests/integration_tests/memory/test_elasticsearch.py index 46694dfee17ff..8374dd7beb2b3 100644 --- a/libs/langchain/tests/integration_tests/memory/test_elasticsearch.py +++ b/libs/langchain/tests/integration_tests/memory/test_elasticsearch.py @@ -4,7 +4,7 @@ from typing import Generator, Union import pytest -from langchain_core.schema.messages import _message_to_dict +from langchain_core.messages import message_to_dict from langchain.memory import ConversationBufferMemory from langchain.memory.chat_message_histories import ElasticsearchChatMessageHistory @@ -80,7 +80,7 @@ def test_memory_with_message_store( # get the message history from the memory store and turn it into a json messages = memory.chat_memory.messages - messages_json = json.dumps([_message_to_dict(msg) for msg in messages]) + messages_json = json.dumps([message_to_dict(msg) for msg in messages]) assert "This is me, the AI" in messages_json assert "This is me, the human" in messages_json diff --git a/libs/langchain/tests/integration_tests/memory/test_firestore.py b/libs/langchain/tests/integration_tests/memory/test_firestore.py index b75802be84818..e7c6b398b0581 100644 --- a/libs/langchain/tests/integration_tests/memory/test_firestore.py +++ b/libs/langchain/tests/integration_tests/memory/test_firestore.py @@ -1,6 +1,6 @@ import json -from langchain_core.schema.messages import _message_to_dict +from langchain_core.messages import message_to_dict from langchain.memory import ConversationBufferMemory from langchain.memory.chat_message_histories import FirestoreChatMessageHistory @@ -33,7 +33,7 @@ def test_memory_with_message_store() -> None: memory_key="baz", chat_memory=message_history, return_messages=True ) messages = memory.chat_memory.messages - messages_json = json.dumps([_message_to_dict(msg) for msg in messages]) + messages_json = json.dumps([message_to_dict(msg) for msg in messages]) assert "This is me, the AI" in messages_json assert "This is me, the human" in messages_json diff --git a/libs/langchain/tests/integration_tests/memory/test_momento.py b/libs/langchain/tests/integration_tests/memory/test_momento.py index 2cb928d8f36e6..ea28c55c0bd74 100644 --- a/libs/langchain/tests/integration_tests/memory/test_momento.py +++ b/libs/langchain/tests/integration_tests/memory/test_momento.py @@ -10,7 +10,7 @@ from typing import Iterator import pytest -from langchain_core.schema.messages import _message_to_dict +from langchain_core.messages import message_to_dict from langchain.memory import ConversationBufferMemory from langchain.memory.chat_message_histories import MomentoChatMessageHistory @@ -61,7 +61,7 @@ def test_memory_with_message_store(message_history: MomentoChatMessageHistory) - # Verify that the messages are in the store messages = memory.chat_memory.messages - messages_json = json.dumps([_message_to_dict(msg) for msg in messages]) + messages_json = json.dumps([message_to_dict(msg) for msg in messages]) assert "This is me, the AI" in messages_json assert "This is me, the human" in messages_json diff --git a/libs/langchain/tests/integration_tests/memory/test_mongodb.py 
b/libs/langchain/tests/integration_tests/memory/test_mongodb.py index 6fb7c1a8b2b8b..e2a2d8421b420 100644 --- a/libs/langchain/tests/integration_tests/memory/test_mongodb.py +++ b/libs/langchain/tests/integration_tests/memory/test_mongodb.py @@ -1,7 +1,7 @@ import json import os -from langchain_core.schema.messages import _message_to_dict +from langchain_core.messages import message_to_dict from langchain.memory import ConversationBufferMemory from langchain.memory.chat_message_histories import MongoDBChatMessageHistory @@ -26,7 +26,7 @@ def test_memory_with_message_store() -> None: # get the message history from the memory store and turn it into a json messages = memory.chat_memory.messages - messages_json = json.dumps([_message_to_dict(msg) for msg in messages]) + messages_json = json.dumps([message_to_dict(msg) for msg in messages]) assert "This is me, the AI" in messages_json assert "This is me, the human" in messages_json diff --git a/libs/langchain/tests/integration_tests/memory/test_neo4j.py b/libs/langchain/tests/integration_tests/memory/test_neo4j.py index 9ee5d3072c26e..aa2fe30adec5a 100644 --- a/libs/langchain/tests/integration_tests/memory/test_neo4j.py +++ b/libs/langchain/tests/integration_tests/memory/test_neo4j.py @@ -1,6 +1,6 @@ import json -from langchain_core.schema.messages import _message_to_dict +from langchain_core.messages import message_to_dict from langchain.memory import ConversationBufferMemory from langchain.memory.chat_message_histories import Neo4jChatMessageHistory @@ -20,7 +20,7 @@ def test_memory_with_message_store() -> None: # get the message history from the memory store and turn it into a json messages = memory.chat_memory.messages - messages_json = json.dumps([_message_to_dict(msg) for msg in messages]) + messages_json = json.dumps([message_to_dict(msg) for msg in messages]) assert "This is me, the AI" in messages_json assert "This is me, the human" in messages_json diff --git a/libs/langchain/tests/integration_tests/memory/test_redis.py b/libs/langchain/tests/integration_tests/memory/test_redis.py index 308ad0e7d56fb..38ef78175c5be 100644 --- a/libs/langchain/tests/integration_tests/memory/test_redis.py +++ b/libs/langchain/tests/integration_tests/memory/test_redis.py @@ -1,6 +1,6 @@ import json -from langchain_core.schema.messages import _message_to_dict +from langchain_core.messages import message_to_dict from langchain.memory import ConversationBufferMemory from langchain.memory.chat_message_histories import RedisChatMessageHistory @@ -22,7 +22,7 @@ def test_memory_with_message_store() -> None: # get the message history from the memory store and turn it into a json messages = memory.chat_memory.messages - messages_json = json.dumps([_message_to_dict(msg) for msg in messages]) + messages_json = json.dumps([message_to_dict(msg) for msg in messages]) assert "This is me, the AI" in messages_json assert "This is me, the human" in messages_json diff --git a/libs/langchain/tests/integration_tests/memory/test_rockset.py b/libs/langchain/tests/integration_tests/memory/test_rockset.py index 2817aefe3e047..7d6a9d62dbbd2 100644 --- a/libs/langchain/tests/integration_tests/memory/test_rockset.py +++ b/libs/langchain/tests/integration_tests/memory/test_rockset.py @@ -8,7 +8,7 @@ import json import os -from langchain_core.schema.messages import _message_to_dict +from langchain_core.messages import message_to_dict from langchain.memory import ConversationBufferMemory from langchain.memory.chat_message_histories import RocksetChatMessageHistory @@ -54,7 +54,7 @@ 
def test_memory_with_message_store(self) -> None: memory.chat_memory.add_user_message("This is me, the human") messages = memory.chat_memory.messages - messages_json = json.dumps([_message_to_dict(msg) for msg in messages]) + messages_json = json.dumps([message_to_dict(msg) for msg in messages]) assert "This is me, the AI" in messages_json assert "This is me, the human" in messages_json diff --git a/libs/langchain/tests/integration_tests/memory/test_singlestoredb.py b/libs/langchain/tests/integration_tests/memory/test_singlestoredb.py index b41c7837703c8..94bb0e7197f53 100644 --- a/libs/langchain/tests/integration_tests/memory/test_singlestoredb.py +++ b/libs/langchain/tests/integration_tests/memory/test_singlestoredb.py @@ -1,6 +1,6 @@ import json -from langchain_core.schema.messages import _message_to_dict +from langchain_core.messages import message_to_dict from langchain.memory import ConversationBufferMemory, SingleStoreDBChatMessageHistory @@ -25,7 +25,7 @@ def test_memory_with_message_store() -> None: # get the message history from the memory store and turn it into a json messages = memory.chat_memory.messages - messages_json = json.dumps([_message_to_dict(msg) for msg in messages]) + messages_json = json.dumps([message_to_dict(msg) for msg in messages]) assert "This is me, the AI" in messages_json assert "This is me, the human" in messages_json diff --git a/libs/langchain/tests/integration_tests/memory/test_upstash_redis.py b/libs/langchain/tests/integration_tests/memory/test_upstash_redis.py index dfc2746b581d1..eed8b094f9daa 100644 --- a/libs/langchain/tests/integration_tests/memory/test_upstash_redis.py +++ b/libs/langchain/tests/integration_tests/memory/test_upstash_redis.py @@ -1,7 +1,7 @@ import json import pytest -from langchain_core.schema.messages import _message_to_dict +from langchain_core.messages import message_to_dict from langchain.memory import ConversationBufferMemory from langchain.memory.chat_message_histories.upstash_redis import ( @@ -29,7 +29,7 @@ def test_memory_with_message_store() -> None: # get the message history from the memory store and turn it into a json messages = memory.chat_memory.messages - messages_json = json.dumps([_message_to_dict(msg) for msg in messages]) + messages_json = json.dumps([message_to_dict(msg) for msg in messages]) assert "This is me, the AI" in messages_json assert "This is me, the human" in messages_json diff --git a/libs/langchain/tests/integration_tests/memory/test_xata.py b/libs/langchain/tests/integration_tests/memory/test_xata.py index 7b74142fd33e3..74691b485dd5d 100644 --- a/libs/langchain/tests/integration_tests/memory/test_xata.py +++ b/libs/langchain/tests/integration_tests/memory/test_xata.py @@ -6,7 +6,7 @@ import json import os -from langchain_core.schema.messages import _message_to_dict +from langchain_core.messages import message_to_dict from langchain.memory import ConversationBufferMemory from langchain.memory.chat_message_histories import XataChatMessageHistory @@ -33,7 +33,7 @@ def test_xata_chat_memory(self) -> None: # get the message history from the memory store and turn it into a json messages = memory.chat_memory.messages - messages_json = json.dumps([_message_to_dict(msg) for msg in messages]) + messages_json = json.dumps([message_to_dict(msg) for msg in messages]) assert "This is me, the AI" in messages_json assert "This is me, the human" in messages_json diff --git a/libs/langchain/tests/integration_tests/prompts/test_ngram_overlap_example_selector.py 
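Besides the path changes, the chat-message-history tests above also pick up a rename: the private _message_to_dict helper from langchain_core.schema.messages is replaced by the public message_to_dict exported from langchain_core.messages. A small sketch of the serialization step these tests exercise, reusing the strings the tests assert on:

import json

from langchain_core.messages import AIMessage, HumanMessage, message_to_dict

history = [
    HumanMessage(content="This is me, the human"),
    AIMessage(content="This is me, the AI"),
]

# message_to_dict converts each message into a plain, JSON-serializable dict,
# so the whole history can be dumped and inspected, as the tests above do.
history_json = json.dumps([message_to_dict(msg) for msg in history])
assert "This is me, the AI" in history_json
assert "This is me, the human" in history_json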
b/libs/langchain/tests/integration_tests/prompts/test_ngram_overlap_example_selector.py index 61c976e4faa5a..401042bec9cb0 100644 --- a/libs/langchain/tests/integration_tests/prompts/test_ngram_overlap_example_selector.py +++ b/libs/langchain/tests/integration_tests/prompts/test_ngram_overlap_example_selector.py @@ -1,7 +1,7 @@ """Test functionality related to ngram overlap based selector.""" import pytest -from langchain_core.prompts.prompt import PromptTemplate +from langchain_core.prompts import PromptTemplate from langchain.prompts.example_selector.ngram_overlap import ( NGramOverlapExampleSelector, diff --git a/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_base.py b/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_base.py index 709378b8e9cf9..b251a55ac1657 100644 --- a/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_base.py +++ b/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_base.py @@ -1,5 +1,5 @@ """Integration test for compression pipelines.""" -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_transformers import EmbeddingsRedundantFilter from langchain.embeddings import OpenAIEmbeddings diff --git a/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_chain_extract.py b/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_chain_extract.py index 4c03bfa5bcb1e..67c6550109c08 100644 --- a/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_chain_extract.py +++ b/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_chain_extract.py @@ -1,5 +1,5 @@ """Integration test for LLMChainExtractor.""" -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.chat_models import ChatOpenAI from langchain.retrievers.document_compressors import LLMChainExtractor diff --git a/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_chain_filter.py b/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_chain_filter.py index 4891a56e5eaf1..8c9649b6b91cf 100644 --- a/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_chain_filter.py +++ b/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_chain_filter.py @@ -1,5 +1,5 @@ """Integration test for llm-based relevant doc filtering.""" -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.chat_models import ChatOpenAI from langchain.retrievers.document_compressors import LLMChainFilter diff --git a/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_embeddings_filter.py b/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_embeddings_filter.py index ae45cf5cf73b9..69e70e89a74b8 100644 --- a/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_embeddings_filter.py +++ b/libs/langchain/tests/integration_tests/retrievers/document_compressors/test_embeddings_filter.py @@ -1,6 +1,6 @@ """Integration test for embedding-based relevant doc filtering.""" import numpy as np -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_transformers.embeddings_redundant_filter import ( _DocumentWithState, diff --git a/libs/langchain/tests/integration_tests/retrievers/test_arxiv.py 
b/libs/langchain/tests/integration_tests/retrievers/test_arxiv.py index b2f557a7d8d64..1dccc932d1cd3 100644 --- a/libs/langchain/tests/integration_tests/retrievers/test_arxiv.py +++ b/libs/langchain/tests/integration_tests/retrievers/test_arxiv.py @@ -2,7 +2,7 @@ from typing import List import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.retrievers import ArxivRetriever diff --git a/libs/langchain/tests/integration_tests/retrievers/test_azure_cognitive_search.py b/libs/langchain/tests/integration_tests/retrievers/test_azure_cognitive_search.py index 84694415ff5ef..2d85a000183d5 100644 --- a/libs/langchain/tests/integration_tests/retrievers/test_azure_cognitive_search.py +++ b/libs/langchain/tests/integration_tests/retrievers/test_azure_cognitive_search.py @@ -1,5 +1,5 @@ """Test Azure Cognitive Search wrapper.""" -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.retrievers.azure_cognitive_search import AzureCognitiveSearchRetriever diff --git a/libs/langchain/tests/integration_tests/retrievers/test_google_docai_warehoure_retriever.py b/libs/langchain/tests/integration_tests/retrievers/test_google_docai_warehoure_retriever.py index 08de37612423f..6dec17070b76e 100644 --- a/libs/langchain/tests/integration_tests/retrievers/test_google_docai_warehoure_retriever.py +++ b/libs/langchain/tests/integration_tests/retrievers/test_google_docai_warehoure_retriever.py @@ -1,7 +1,7 @@ """Test Google Cloud Document AI Warehouse retriever.""" import os -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.retrievers import GoogleDocumentAIWarehouseRetriever diff --git a/libs/langchain/tests/integration_tests/retrievers/test_google_vertex_ai_search.py b/libs/langchain/tests/integration_tests/retrievers/test_google_vertex_ai_search.py index 3bc2937b5cf1a..2d1eb772ba790 100644 --- a/libs/langchain/tests/integration_tests/retrievers/test_google_vertex_ai_search.py +++ b/libs/langchain/tests/integration_tests/retrievers/test_google_vertex_ai_search.py @@ -14,7 +14,7 @@ import os import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.retrievers.google_vertex_ai_search import ( GoogleCloudEnterpriseSearchRetriever, diff --git a/libs/langchain/tests/integration_tests/retrievers/test_kay.py b/libs/langchain/tests/integration_tests/retrievers/test_kay.py index 73754b1c7b905..aaf46bf013be8 100644 --- a/libs/langchain/tests/integration_tests/retrievers/test_kay.py +++ b/libs/langchain/tests/integration_tests/retrievers/test_kay.py @@ -1,6 +1,6 @@ """Integration test for Kay.ai API Wrapper.""" import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.retrievers import KayAiRetriever diff --git a/libs/langchain/tests/integration_tests/retrievers/test_pubmed.py b/libs/langchain/tests/integration_tests/retrievers/test_pubmed.py index c0a5a37b9c681..043944d5f801c 100644 --- a/libs/langchain/tests/integration_tests/retrievers/test_pubmed.py +++ b/libs/langchain/tests/integration_tests/retrievers/test_pubmed.py @@ -2,7 +2,7 @@ from typing import List import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.retrievers import PubMedRetriever diff --git a/libs/langchain/tests/integration_tests/retrievers/test_wikipedia.py 
b/libs/langchain/tests/integration_tests/retrievers/test_wikipedia.py index 2188e43870763..f911c219c5194 100644 --- a/libs/langchain/tests/integration_tests/retrievers/test_wikipedia.py +++ b/libs/langchain/tests/integration_tests/retrievers/test_wikipedia.py @@ -2,7 +2,7 @@ from typing import List import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.retrievers import WikipediaRetriever diff --git a/libs/langchain/tests/integration_tests/retrievers/test_zep.py b/libs/langchain/tests/integration_tests/retrievers/test_zep.py index 1864799750b3b..3989f98b5b5de 100644 --- a/libs/langchain/tests/integration_tests/retrievers/test_zep.py +++ b/libs/langchain/tests/integration_tests/retrievers/test_zep.py @@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, List import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from pytest_mock import MockerFixture from langchain.retrievers import ZepRetriever diff --git a/libs/langchain/tests/integration_tests/smith/evaluation/test_runner_utils.py b/libs/langchain/tests/integration_tests/smith/evaluation/test_runner_utils.py index 6cd52ef15e617..0560d1cbe6a48 100644 --- a/libs/langchain/tests/integration_tests/smith/evaluation/test_runner_utils.py +++ b/libs/langchain/tests/integration_tests/smith/evaluation/test_runner_utils.py @@ -2,8 +2,8 @@ from uuid import uuid4 import pytest +from langchain_core.messages import BaseMessage, HumanMessage from langchain_core.prompts.chat import ChatPromptTemplate -from langchain_core.schema.messages import BaseMessage, HumanMessage from langsmith import Client as Client from langsmith.schemas import DataType diff --git a/libs/langchain/tests/integration_tests/test_document_transformers.py b/libs/langchain/tests/integration_tests/test_document_transformers.py index 63a2f1e9a46a0..fb13d3c34ec99 100644 --- a/libs/langchain/tests/integration_tests/test_document_transformers.py +++ b/libs/langchain/tests/integration_tests/test_document_transformers.py @@ -1,5 +1,5 @@ """Integration test for embedding-based redundant doc filtering.""" -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_transformers.embeddings_redundant_filter import ( EmbeddingsClusteringFilter, diff --git a/libs/langchain/tests/integration_tests/test_nuclia_transformer.py b/libs/langchain/tests/integration_tests/test_nuclia_transformer.py index a82390a59a8a2..902cf4d2ec4c4 100644 --- a/libs/langchain/tests/integration_tests/test_nuclia_transformer.py +++ b/libs/langchain/tests/integration_tests/test_nuclia_transformer.py @@ -3,7 +3,7 @@ from typing import Any from unittest import mock -from langchain_core.schema.document import Document +from langchain_core.documents import Document from langchain.document_transformers.nuclia_text_transform import NucliaTextTransformer from langchain.tools.nuclia.tool import NucliaUnderstandingAPI diff --git a/libs/langchain/tests/integration_tests/test_schema.py b/libs/langchain/tests/integration_tests/test_schema.py index 7d6d786503309..1de13f2304d6c 100644 --- a/libs/langchain/tests/integration_tests/test_schema.py +++ b/libs/langchain/tests/integration_tests/test_schema.py @@ -1,6 +1,6 @@ """Test formatting functionality.""" -from langchain_core.schema.language_model import _get_token_ids_default_method +from langchain_core.language_models.base import _get_token_ids_default_method class TestTokenCountingWithGPT2Tokenizer: diff --git 
a/libs/langchain/tests/integration_tests/utilities/test_arxiv.py b/libs/langchain/tests/integration_tests/utilities/test_arxiv.py index 59d1bed435ca1..b1ca9de7330f3 100644 --- a/libs/langchain/tests/integration_tests/utilities/test_arxiv.py +++ b/libs/langchain/tests/integration_tests/utilities/test_arxiv.py @@ -2,7 +2,7 @@ from typing import Any, List import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.agents.load_tools import load_tools from langchain.tools import ArxivQueryRun diff --git a/libs/langchain/tests/integration_tests/utilities/test_pubmed.py b/libs/langchain/tests/integration_tests/utilities/test_pubmed.py index 75a74398a722a..52c8d8cdf888b 100644 --- a/libs/langchain/tests/integration_tests/utilities/test_pubmed.py +++ b/libs/langchain/tests/integration_tests/utilities/test_pubmed.py @@ -2,7 +2,7 @@ from typing import Any, List import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.agents.load_tools import load_tools from langchain.tools import PubmedQueryRun diff --git a/libs/langchain/tests/integration_tests/utilities/test_tensorflow_datasets.py b/libs/langchain/tests/integration_tests/utilities/test_tensorflow_datasets.py index a2883c46c1789..90cd514844dd9 100644 --- a/libs/langchain/tests/integration_tests/utilities/test_tensorflow_datasets.py +++ b/libs/langchain/tests/integration_tests/utilities/test_tensorflow_datasets.py @@ -4,8 +4,8 @@ from typing import TYPE_CHECKING import pytest +from langchain_core.documents import Document from langchain_core.pydantic_v1 import ValidationError -from langchain_core.schema.document import Document from langchain.utilities.tensorflow_datasets import TensorflowDatasets diff --git a/libs/langchain/tests/integration_tests/utilities/test_wikipedia_api.py b/libs/langchain/tests/integration_tests/utilities/test_wikipedia_api.py index e4461403b3ec6..1041fdb5a7b6e 100644 --- a/libs/langchain/tests/integration_tests/utilities/test_wikipedia_api.py +++ b/libs/langchain/tests/integration_tests/utilities/test_wikipedia_api.py @@ -2,7 +2,7 @@ from typing import List import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.utilities import WikipediaAPIWrapper diff --git a/libs/langchain/tests/integration_tests/vectorstores/conftest.py b/libs/langchain/tests/integration_tests/vectorstores/conftest.py index 2af17fa945d22..5899e4ec146e0 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/conftest.py +++ b/libs/langchain/tests/integration_tests/vectorstores/conftest.py @@ -2,7 +2,7 @@ from typing import Generator, List, Union import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from vcr.request import Request from langchain.document_loaders import TextLoader diff --git a/libs/langchain/tests/integration_tests/vectorstores/docarray/test_hnsw.py b/libs/langchain/tests/integration_tests/vectorstores/docarray/test_hnsw.py index 39ef184a19cec..862efab2aaa2f 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/docarray/test_hnsw.py +++ b/libs/langchain/tests/integration_tests/vectorstores/docarray/test_hnsw.py @@ -3,7 +3,7 @@ import numpy as np import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.vectorstores.docarray import DocArrayHnswSearch from tests.integration_tests.vectorstores.fake_embeddings import 
FakeEmbeddings diff --git a/libs/langchain/tests/integration_tests/vectorstores/docarray/test_in_memory.py b/libs/langchain/tests/integration_tests/vectorstores/docarray/test_in_memory.py index 437f9f76dd8d3..1a1a39caa03da 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/docarray/test_in_memory.py +++ b/libs/langchain/tests/integration_tests/vectorstores/docarray/test_in_memory.py @@ -3,7 +3,7 @@ import numpy as np import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.vectorstores.docarray import DocArrayInMemorySearch from tests.integration_tests.vectorstores.fake_embeddings import FakeEmbeddings diff --git a/libs/langchain/tests/integration_tests/vectorstores/fake_embeddings.py b/libs/langchain/tests/integration_tests/vectorstores/fake_embeddings.py index d5d2607244884..7b99c696444af 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/fake_embeddings.py +++ b/libs/langchain/tests/integration_tests/vectorstores/fake_embeddings.py @@ -2,7 +2,7 @@ import math from typing import List -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings fake_texts = ["foo", "bar", "baz"] diff --git a/libs/langchain/tests/integration_tests/vectorstores/qdrant/async_api/test_from_texts.py b/libs/langchain/tests/integration_tests/vectorstores/qdrant/async_api/test_from_texts.py index f3c27d27bffaa..4c0d00e4f62ce 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/qdrant/async_api/test_from_texts.py +++ b/libs/langchain/tests/integration_tests/vectorstores/qdrant/async_api/test_from_texts.py @@ -2,7 +2,7 @@ from typing import Optional import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.vectorstores import Qdrant from langchain.vectorstores.qdrant import QdrantException diff --git a/libs/langchain/tests/integration_tests/vectorstores/qdrant/async_api/test_max_marginal_relevance.py b/libs/langchain/tests/integration_tests/vectorstores/qdrant/async_api/test_max_marginal_relevance.py index 22683b1ee1f4c..f92be8148b9be 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/qdrant/async_api/test_max_marginal_relevance.py +++ b/libs/langchain/tests/integration_tests/vectorstores/qdrant/async_api/test_max_marginal_relevance.py @@ -1,7 +1,7 @@ from typing import Optional import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.vectorstores import Qdrant from tests.integration_tests.vectorstores.fake_embeddings import ( diff --git a/libs/langchain/tests/integration_tests/vectorstores/qdrant/async_api/test_similarity_search.py b/libs/langchain/tests/integration_tests/vectorstores/qdrant/async_api/test_similarity_search.py index cfdf98aa45f71..5fa2a9e9db83c 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/qdrant/async_api/test_similarity_search.py +++ b/libs/langchain/tests/integration_tests/vectorstores/qdrant/async_api/test_similarity_search.py @@ -2,7 +2,7 @@ import numpy as np import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.vectorstores import Qdrant from tests.integration_tests.vectorstores.fake_embeddings import ( diff --git a/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_add_texts.py b/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_add_texts.py index 315e36d335876..f1d1ec89b264f 100644 --- 
a/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_add_texts.py +++ b/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_add_texts.py @@ -2,7 +2,7 @@ from typing import Optional import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.vectorstores import Qdrant from tests.integration_tests.vectorstores.fake_embeddings import ( diff --git a/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_embedding_interface.py b/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_embedding_interface.py index f788e474a13a8..c3ed270f13182 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_embedding_interface.py +++ b/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_embedding_interface.py @@ -2,7 +2,7 @@ from typing import Callable, Optional import pytest -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings from langchain.vectorstores import Qdrant from tests.integration_tests.vectorstores.fake_embeddings import ( diff --git a/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_from_texts.py b/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_from_texts.py index a9088170d9548..630598078beac 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_from_texts.py +++ b/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_from_texts.py @@ -3,7 +3,7 @@ from typing import Optional import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.vectorstores import Qdrant from langchain.vectorstores.qdrant import QdrantException diff --git a/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_max_marginal_relevance.py b/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_max_marginal_relevance.py index 56a84c38db504..b7d6cf6505677 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_max_marginal_relevance.py +++ b/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_max_marginal_relevance.py @@ -1,7 +1,7 @@ from typing import Optional import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.vectorstores import Qdrant from tests.integration_tests.vectorstores.fake_embeddings import ( diff --git a/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_similarity_search.py b/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_similarity_search.py index beab6b3ad8ba8..c10db8638fc33 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_similarity_search.py +++ b/libs/langchain/tests/integration_tests/vectorstores/qdrant/test_similarity_search.py @@ -2,7 +2,7 @@ import numpy as np import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.vectorstores import Qdrant from tests.integration_tests.vectorstores.fake_embeddings import ( diff --git a/libs/langchain/tests/integration_tests/vectorstores/test_alibabacloud_opensearch.py b/libs/langchain/tests/integration_tests/vectorstores/test_alibabacloud_opensearch.py index 87d43d8f2f6c3..57a24553e6efd 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/test_alibabacloud_opensearch.py +++ b/libs/langchain/tests/integration_tests/vectorstores/test_alibabacloud_opensearch.py @@ -1,7 +1,7 @@ import time from typing import List 
-from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.vectorstores.alibabacloud_opensearch import ( AlibabaCloudOpenSearch, diff --git a/libs/langchain/tests/integration_tests/vectorstores/test_astradb.py b/libs/langchain/tests/integration_tests/vectorstores/test_astradb.py index ab4ec88076b4c..d8f4fb494e812 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/test_astradb.py +++ b/libs/langchain/tests/integration_tests/vectorstores/test_astradb.py @@ -17,7 +17,7 @@ from typing import Iterable, List import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.embeddings.base import Embeddings from langchain.vectorstores import AstraDB diff --git a/libs/langchain/tests/integration_tests/vectorstores/test_dashvector.py b/libs/langchain/tests/integration_tests/vectorstores/test_dashvector.py index 23f0c3d3cc1ef..d8c5040bb545c 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/test_dashvector.py +++ b/libs/langchain/tests/integration_tests/vectorstores/test_dashvector.py @@ -1,6 +1,6 @@ from time import sleep -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.vectorstores import DashVector from tests.integration_tests.vectorstores.fake_embeddings import FakeEmbeddings diff --git a/libs/langchain/tests/integration_tests/vectorstores/test_mongodb_atlas.py b/libs/langchain/tests/integration_tests/vectorstores/test_mongodb_atlas.py index be9532eab2955..a8cf5a13584a7 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/test_mongodb_atlas.py +++ b/libs/langchain/tests/integration_tests/vectorstores/test_mongodb_atlas.py @@ -6,7 +6,7 @@ from typing import Any import pytest -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings from langchain.docstore.document import Document from langchain.vectorstores.mongodb_atlas import MongoDBAtlasVectorSearch diff --git a/libs/langchain/tests/integration_tests/vectorstores/test_zep.py b/libs/langchain/tests/integration_tests/vectorstores/test_zep.py index 10266370fbc64..bdc1c13ee1119 100644 --- a/libs/langchain/tests/integration_tests/vectorstores/test_zep.py +++ b/libs/langchain/tests/integration_tests/vectorstores/test_zep.py @@ -5,7 +5,7 @@ from uuid import uuid4 import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from pytest_mock import MockerFixture from langchain.vectorstores import ZepVectorStore diff --git a/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_log.py b/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_log.py index 8bf376e55dd2e..edcd1b59139a0 100644 --- a/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_log.py +++ b/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_log.py @@ -1,4 +1,4 @@ -from langchain_core.schema.agent import AgentAction +from langchain_core.agents import AgentAction from langchain.agents.format_scratchpad.log import format_log_to_str diff --git a/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_log_to_messages.py b/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_log_to_messages.py index 2648481d6ca4a..deb6eaceed02b 100644 --- a/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_log_to_messages.py +++ b/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_log_to_messages.py @@ -1,5 +1,5 @@ -from langchain_core.schema.agent 
import AgentAction -from langchain_core.schema.messages import AIMessage, HumanMessage +from langchain_core.agents import AgentAction +from langchain_core.messages import AIMessage, HumanMessage from langchain.agents.format_scratchpad.log_to_messages import format_log_to_messages diff --git a/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_openai_functions.py b/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_openai_functions.py index bc4350444f0c7..031721fc24763 100644 --- a/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_openai_functions.py +++ b/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_openai_functions.py @@ -1,5 +1,5 @@ -from langchain_core.schema.agent import AgentActionMessageLog -from langchain_core.schema.messages import AIMessage, FunctionMessage +from langchain_core.agents import AgentActionMessageLog +from langchain_core.messages import AIMessage, FunctionMessage from langchain.agents.format_scratchpad.openai_functions import ( format_to_openai_function_messages, diff --git a/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_xml.py b/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_xml.py index 9c08040e90560..322b6874bef78 100644 --- a/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_xml.py +++ b/libs/langchain/tests/unit_tests/agents/format_scratchpad/test_xml.py @@ -1,4 +1,4 @@ -from langchain_core.schema.agent import AgentAction +from langchain_core.agents import AgentAction from langchain.agents.format_scratchpad.xml import format_xml diff --git a/libs/langchain/tests/unit_tests/agents/output_parsers/test_json.py b/libs/langchain/tests/unit_tests/agents/output_parsers/test_json.py index e25cca3989b21..972ce093ad32c 100644 --- a/libs/langchain/tests/unit_tests/agents/output_parsers/test_json.py +++ b/libs/langchain/tests/unit_tests/agents/output_parsers/test_json.py @@ -1,4 +1,4 @@ -from langchain_core.schema.agent import AgentAction, AgentFinish +from langchain_core.agents import AgentAction, AgentFinish from langchain.agents.output_parsers.json import JSONAgentOutputParser diff --git a/libs/langchain/tests/unit_tests/agents/output_parsers/test_openai_functions.py b/libs/langchain/tests/unit_tests/agents/output_parsers/test_openai_functions.py index 53713e81730c8..ae1ed2484736c 100644 --- a/libs/langchain/tests/unit_tests/agents/output_parsers/test_openai_functions.py +++ b/libs/langchain/tests/unit_tests/agents/output_parsers/test_openai_functions.py @@ -1,7 +1,10 @@ import pytest -from langchain_core.schema import AgentFinish, OutputParserException -from langchain_core.schema.agent import AgentActionMessageLog -from langchain_core.schema.messages import AIMessage, SystemMessage +from langchain_core.agents import ( + AgentActionMessageLog, + AgentFinish, +) +from langchain_core.exceptions import OutputParserException +from langchain_core.messages import AIMessage, SystemMessage from langchain.agents.output_parsers.openai_functions import ( OpenAIFunctionsAgentOutputParser, diff --git a/libs/langchain/tests/unit_tests/agents/output_parsers/test_react_json_single_input.py b/libs/langchain/tests/unit_tests/agents/output_parsers/test_react_json_single_input.py index 86ee44649251d..935ff30174747 100644 --- a/libs/langchain/tests/unit_tests/agents/output_parsers/test_react_json_single_input.py +++ b/libs/langchain/tests/unit_tests/agents/output_parsers/test_react_json_single_input.py @@ -1,4 +1,4 @@ -from langchain_core.schema.agent import AgentAction, AgentFinish +from 
langchain_core.agents import AgentAction, AgentFinish from langchain.agents.output_parsers.react_json_single_input import ( ReActJsonSingleInputOutputParser, diff --git a/libs/langchain/tests/unit_tests/agents/output_parsers/test_react_single_input.py b/libs/langchain/tests/unit_tests/agents/output_parsers/test_react_single_input.py index f3cb2e5672194..07365ccb5d3ab 100644 --- a/libs/langchain/tests/unit_tests/agents/output_parsers/test_react_single_input.py +++ b/libs/langchain/tests/unit_tests/agents/output_parsers/test_react_single_input.py @@ -1,6 +1,6 @@ import pytest -from langchain_core.schema.agent import AgentAction, AgentFinish -from langchain_core.schema.output_parser import OutputParserException +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.exceptions import OutputParserException from langchain.agents.output_parsers.react_single_input import ( ReActSingleInputOutputParser, diff --git a/libs/langchain/tests/unit_tests/agents/output_parsers/test_self_ask.py b/libs/langchain/tests/unit_tests/agents/output_parsers/test_self_ask.py index 5902c5c6d9b7d..c5d5658b064b8 100644 --- a/libs/langchain/tests/unit_tests/agents/output_parsers/test_self_ask.py +++ b/libs/langchain/tests/unit_tests/agents/output_parsers/test_self_ask.py @@ -1,4 +1,4 @@ -from langchain_core.schema.agent import AgentAction, AgentFinish +from langchain_core.agents import AgentAction, AgentFinish from langchain.agents.output_parsers.self_ask import SelfAskOutputParser diff --git a/libs/langchain/tests/unit_tests/agents/output_parsers/test_xml.py b/libs/langchain/tests/unit_tests/agents/output_parsers/test_xml.py index 54e39339aba15..1b271e247006e 100644 --- a/libs/langchain/tests/unit_tests/agents/output_parsers/test_xml.py +++ b/libs/langchain/tests/unit_tests/agents/output_parsers/test_xml.py @@ -1,4 +1,4 @@ -from langchain_core.schema.agent import AgentAction, AgentFinish +from langchain_core.agents import AgentAction, AgentFinish from langchain.agents.output_parsers.xml import XMLAgentOutputParser diff --git a/libs/langchain/tests/unit_tests/agents/test_chat.py b/libs/langchain/tests/unit_tests/agents/test_chat.py index 069836a5ec193..3776c1df01839 100644 --- a/libs/langchain/tests/unit_tests/agents/test_chat.py +++ b/libs/langchain/tests/unit_tests/agents/test_chat.py @@ -1,7 +1,7 @@ """Unittests for langchain.agents.chat package.""" from typing import Tuple -from langchain_core.schema import AgentAction +from langchain_core.agents import AgentAction from langchain.agents.chat.output_parser import ChatOutputParser diff --git a/libs/langchain/tests/unit_tests/agents/test_mrkl.py b/libs/langchain/tests/unit_tests/agents/test_mrkl.py index b6cb39f31fef5..772642c60c6e6 100644 --- a/libs/langchain/tests/unit_tests/agents/test_mrkl.py +++ b/libs/langchain/tests/unit_tests/agents/test_mrkl.py @@ -3,8 +3,9 @@ from typing import Tuple import pytest +from langchain_core.agents import AgentAction +from langchain_core.exceptions import OutputParserException from langchain_core.prompts import PromptTemplate -from langchain_core.schema import AgentAction, OutputParserException from langchain.agents.mrkl.base import ZeroShotAgent from langchain.agents.mrkl.output_parser import MRKLOutputParser diff --git a/libs/langchain/tests/unit_tests/agents/test_mrkl_output_parser.py b/libs/langchain/tests/unit_tests/agents/test_mrkl_output_parser.py index 78803a0e5c908..f4ac56009c1b9 100644 --- a/libs/langchain/tests/unit_tests/agents/test_mrkl_output_parser.py +++ 
b/libs/langchain/tests/unit_tests/agents/test_mrkl_output_parser.py @@ -1,5 +1,6 @@ import pytest -from langchain_core.schema import AgentAction, AgentFinish, OutputParserException +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.exceptions import OutputParserException from langchain.agents.mrkl.output_parser import ( MISSING_ACTION_AFTER_THOUGHT_ERROR_MESSAGE, diff --git a/libs/langchain/tests/unit_tests/agents/test_openai_functions_multi.py b/libs/langchain/tests/unit_tests/agents/test_openai_functions_multi.py index e846d803e573a..30bbd47a0f420 100644 --- a/libs/langchain/tests/unit_tests/agents/test_openai_functions_multi.py +++ b/libs/langchain/tests/unit_tests/agents/test_openai_functions_multi.py @@ -1,8 +1,9 @@ import json import pytest -from langchain_core.schema import AgentFinish, OutputParserException -from langchain_core.schema.messages import AIMessage, SystemMessage +from langchain_core.agents import AgentFinish +from langchain_core.exceptions import OutputParserException +from langchain_core.messages import AIMessage, SystemMessage from langchain.agents.openai_functions_multi_agent.base import ( _FunctionsAgentAction, diff --git a/libs/langchain/tests/unit_tests/agents/test_react.py b/libs/langchain/tests/unit_tests/agents/test_react.py index b5b8f0be9deb8..81ab3f464dd79 100644 --- a/libs/langchain/tests/unit_tests/agents/test_react.py +++ b/libs/langchain/tests/unit_tests/agents/test_react.py @@ -2,8 +2,8 @@ from typing import Union +from langchain_core.agents import AgentAction from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.schema import AgentAction from langchain.agents.react.base import ReActChain, ReActDocstoreAgent from langchain.agents.tools import Tool diff --git a/libs/langchain/tests/unit_tests/agents/test_structured_chat.py b/libs/langchain/tests/unit_tests/agents/test_structured_chat.py index a739a9463cf51..65b8230efc515 100644 --- a/libs/langchain/tests/unit_tests/agents/test_structured_chat.py +++ b/libs/langchain/tests/unit_tests/agents/test_structured_chat.py @@ -2,13 +2,13 @@ from textwrap import dedent from typing import Any, Tuple +from langchain_core.agents import AgentAction, AgentFinish from langchain_core.prompts.chat import ( ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate, ) -from langchain_core.schema import AgentAction, AgentFinish -from langchain_core.tool import Tool +from langchain_core.tools import Tool from langchain.agents.structured_chat.base import StructuredChatAgent from langchain.agents.structured_chat.output_parser import StructuredChatOutputParser diff --git a/libs/langchain/tests/unit_tests/callbacks/fake_callback_handler.py b/libs/langchain/tests/unit_tests/callbacks/fake_callback_handler.py index 7d046f310a4df..84d4575013a88 100644 --- a/libs/langchain/tests/unit_tests/callbacks/fake_callback_handler.py +++ b/libs/langchain/tests/unit_tests/callbacks/fake_callback_handler.py @@ -3,8 +3,8 @@ from typing import Any, Dict, List, Optional, Union from uuid import UUID +from langchain_core.messages import BaseMessage from langchain_core.pydantic_v1 import BaseModel -from langchain_core.schema.messages import BaseMessage from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler diff --git a/libs/langchain/tests/unit_tests/callbacks/test_callback_manager.py b/libs/langchain/tests/unit_tests/callbacks/test_callback_manager.py index e21c8e572351e..ce0bcaa910138 100644 --- 
a/libs/langchain/tests/unit_tests/callbacks/test_callback_manager.py +++ b/libs/langchain/tests/unit_tests/callbacks/test_callback_manager.py @@ -3,7 +3,9 @@ from unittest.mock import patch import pytest -from langchain_core.schema import AgentAction, AgentFinish, LLMResult +from langchain_core.agents import AgentAction, AgentFinish +from langchain_core.outputs import LLMResult +from langchain_core.tracers.langchain import LangChainTracer, wait_for_all_tracers from langchain.callbacks.base import BaseCallbackHandler from langchain.callbacks.manager import ( @@ -14,7 +16,6 @@ tracing_v2_enabled, ) from langchain.callbacks.stdout import StdOutCallbackHandler -from langchain.callbacks.tracers.langchain import LangChainTracer, wait_for_all_tracers from langchain.llms.openai import BaseOpenAI from tests.unit_tests.callbacks.fake_callback_handler import ( BaseFakeCallbackHandler, diff --git a/libs/langchain/tests/unit_tests/callbacks/test_openai_info.py b/libs/langchain/tests/unit_tests/callbacks/test_openai_info.py index a10f5cc782730..cc94c8bc5aa6d 100644 --- a/libs/langchain/tests/unit_tests/callbacks/test_openai_info.py +++ b/libs/langchain/tests/unit_tests/callbacks/test_openai_info.py @@ -2,7 +2,7 @@ from uuid import uuid4 import pytest -from langchain_core.schema import LLMResult +from langchain_core.outputs import LLMResult from langchain.callbacks import OpenAICallbackHandler from langchain.llms.openai import BaseOpenAI diff --git a/libs/langchain/tests/unit_tests/callbacks/test_schemas.py b/libs/langchain/tests/unit_tests/callbacks/test_schemas.py index 34937d7497162..a452b1587520c 100644 --- a/libs/langchain/tests/unit_tests/callbacks/test_schemas.py +++ b/libs/langchain/tests/unit_tests/callbacks/test_schemas.py @@ -1,5 +1,5 @@ -import langchain.callbacks.tracers.schemas as schemas -from langchain.callbacks.tracers.schemas import __all__ as schemas_all +import langchain_core.tracers.schemas as schemas +from langchain_core.tracers.schemas import __all__ as schemas_all def test_public_api() -> None: diff --git a/libs/langchain/tests/unit_tests/callbacks/tracers/test_base_tracer.py b/libs/langchain/tests/unit_tests/callbacks/tracers/test_base_tracer.py index 7c48fef6a2a95..f658abe260854 100644 --- a/libs/langchain/tests/unit_tests/callbacks/tracers/test_base_tracer.py +++ b/libs/langchain/tests/unit_tests/callbacks/tracers/test_base_tracer.py @@ -7,12 +7,12 @@ import pytest from freezegun import freeze_time -from langchain_core.schema import LLMResult -from langchain_core.schema.messages import HumanMessage +from langchain_core.messages import HumanMessage +from langchain_core.outputs import LLMResult +from langchain_core.tracers.base import BaseTracer, TracerException +from langchain_core.tracers.schemas import Run from langchain.callbacks.manager import CallbackManager -from langchain.callbacks.tracers.base import BaseTracer, TracerException -from langchain.callbacks.tracers.schemas import Run SERIALIZED = {"id": ["llm"]} SERIALIZED_CHAT = {"id": ["chat_model"]} diff --git a/libs/langchain/tests/unit_tests/callbacks/tracers/test_langchain.py b/libs/langchain/tests/unit_tests/callbacks/tracers/test_langchain.py index 61060d656aa82..022a5200a0e6f 100644 --- a/libs/langchain/tests/unit_tests/callbacks/tracers/test_langchain.py +++ b/libs/langchain/tests/unit_tests/callbacks/tracers/test_langchain.py @@ -6,12 +6,11 @@ from uuid import UUID import pytest -from langchain_core.schema.output import LLMResult +from langchain_core.outputs import LLMResult +from langchain_core.tracers.langchain 
import LangChainTracer +from langchain_core.tracers.schemas import Run from langsmith import Client -from langchain.callbacks.tracers.langchain import LangChainTracer -from langchain.callbacks.tracers.schemas import Run - def test_example_id_assignment_threadsafe() -> None: """Test that example assigned at callback start/end is honored.""" diff --git a/libs/langchain/tests/unit_tests/callbacks/tracers/test_langchain_v1.py b/libs/langchain/tests/unit_tests/callbacks/tracers/test_langchain_v1.py index b4cc6794d37e1..8746218f41db8 100644 --- a/libs/langchain/tests/unit_tests/callbacks/tracers/test_langchain_v1.py +++ b/libs/langchain/tests/unit_tests/callbacks/tracers/test_langchain_v1.py @@ -7,19 +7,19 @@ import pytest from freezegun import freeze_time -from langchain_core.callbacks.tracers.langchain_v1 import ( +from langchain_core.messages import HumanMessage +from langchain_core.outputs import LLMResult +from langchain_core.tracers.base import BaseTracer, TracerException +from langchain_core.tracers.langchain_v1 import ( ChainRun, LangChainTracerV1, LLMRun, ToolRun, TracerSessionV1, ) -from langchain_core.schema import LLMResult -from langchain_core.schema.messages import HumanMessage +from langchain_core.tracers.schemas import Run, TracerSessionV1Base from langchain.callbacks.manager import CallbackManager -from langchain.callbacks.tracers.base import BaseTracer, TracerException -from langchain.callbacks.tracers.schemas import Run, TracerSessionV1Base TEST_SESSION_ID = 2023 diff --git a/libs/langchain/tests/unit_tests/chains/test_base.py b/libs/langchain/tests/unit_tests/chains/test_base.py index a66e2aae1bd9c..2c410e2337ddb 100644 --- a/libs/langchain/tests/unit_tests/chains/test_base.py +++ b/libs/langchain/tests/unit_tests/chains/test_base.py @@ -2,10 +2,11 @@ from typing import Any, Dict, List, Optional import pytest -from langchain_core.schema import RUN_KEY, BaseMemory +from langchain_core.memory import BaseMemory from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain +from langchain.schema import RUN_KEY from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler diff --git a/libs/langchain/tests/unit_tests/chains/test_combine_documents.py b/libs/langchain/tests/unit_tests/chains/test_combine_documents.py index 62fff6269e3cc..70265b4a93ecf 100644 --- a/libs/langchain/tests/unit_tests/chains/test_combine_documents.py +++ b/libs/langchain/tests/unit_tests/chains/test_combine_documents.py @@ -3,8 +3,7 @@ from typing import Any, List import pytest -from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.schema import format_document +from langchain_core.prompts import PromptTemplate, format_document from langchain.chains.combine_documents.reduce import ( collapse_docs, diff --git a/libs/langchain/tests/unit_tests/chains/test_conversation.py b/libs/langchain/tests/unit_tests/chains/test_conversation.py index 2eb88b5ab3093..86ecd647e8d45 100644 --- a/libs/langchain/tests/unit_tests/chains/test_conversation.py +++ b/libs/langchain/tests/unit_tests/chains/test_conversation.py @@ -1,7 +1,7 @@ """Test conversation chain and memory.""" import pytest +from langchain_core.memory import BaseMemory from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.schema import BaseMemory from langchain.chains.conversation.base import ConversationChain from langchain.memory.buffer import ConversationBufferMemory diff --git 
a/libs/langchain/tests/unit_tests/chains/test_conversation_retrieval.py b/libs/langchain/tests/unit_tests/chains/test_conversation_retrieval.py index cd322f1dd6607..a97e77867632e 100644 --- a/libs/langchain/tests/unit_tests/chains/test_conversation_retrieval.py +++ b/libs/langchain/tests/unit_tests/chains/test_conversation_retrieval.py @@ -1,5 +1,5 @@ """Test conversation chain and memory.""" -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.chains.conversational_retrieval.base import ConversationalRetrievalChain from langchain.llms.fake import FakeListLLM diff --git a/libs/langchain/tests/unit_tests/chains/test_hyde.py b/libs/langchain/tests/unit_tests/chains/test_hyde.py index f617f9c8c716d..4be98304789c7 100644 --- a/libs/langchain/tests/unit_tests/chains/test_hyde.py +++ b/libs/langchain/tests/unit_tests/chains/test_hyde.py @@ -2,8 +2,8 @@ from typing import Any, List, Optional import numpy as np -from langchain_core.schema import Generation, LLMResult -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings +from langchain_core.outputs import Generation, LLMResult from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/tests/unit_tests/chains/test_llm.py b/libs/langchain/tests/unit_tests/chains/test_llm.py index 58ede4212ea04..8bcc009b82895 100644 --- a/libs/langchain/tests/unit_tests/chains/test_llm.py +++ b/libs/langchain/tests/unit_tests/chains/test_llm.py @@ -4,8 +4,8 @@ from unittest.mock import patch import pytest -from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.schema import BaseOutputParser +from langchain_core.output_parsers import BaseOutputParser +from langchain_core.prompts import PromptTemplate from langchain.chains.llm import LLMChain from langchain.chains.loading import load_chain diff --git a/libs/langchain/tests/unit_tests/chains/test_memory.py b/libs/langchain/tests/unit_tests/chains/test_memory.py index 4a57d5a1b1e66..dae9cc3661022 100644 --- a/libs/langchain/tests/unit_tests/chains/test_memory.py +++ b/libs/langchain/tests/unit_tests/chains/test_memory.py @@ -1,5 +1,5 @@ import pytest -from langchain_core.schema import BaseMemory +from langchain_core.memory import BaseMemory from langchain.chains.conversation.memory import ( ConversationBufferMemory, diff --git a/libs/langchain/tests/unit_tests/chat_loaders/test_telegram.py b/libs/langchain/tests/unit_tests/chat_loaders/test_telegram.py index 56df813f41d0b..e06f9291b90ad 100644 --- a/libs/langchain/tests/unit_tests/chat_loaders/test_telegram.py +++ b/libs/langchain/tests/unit_tests/chat_loaders/test_telegram.py @@ -5,7 +5,7 @@ from typing import Sequence import pytest -from langchain_core.schema import AIMessage, BaseMessage, HumanMessage +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage from langchain.chat_loaders import telegram, utils diff --git a/libs/langchain/tests/unit_tests/chat_models/test_anthropic.py b/libs/langchain/tests/unit_tests/chat_models/test_anthropic.py index dd8e908459db7..aa9e8b28f670a 100644 --- a/libs/langchain/tests/unit_tests/chat_models/test_anthropic.py +++ b/libs/langchain/tests/unit_tests/chat_models/test_anthropic.py @@ -3,7 +3,7 @@ from typing import List import pytest -from langchain_core.schema import AIMessage, BaseMessage, HumanMessage, SystemMessage +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, SystemMessage from langchain.chat_models import 
ChatAnthropic from langchain.chat_models.anthropic import convert_messages_to_prompt_anthropic diff --git a/libs/langchain/tests/unit_tests/chat_models/test_baichuan.py b/libs/langchain/tests/unit_tests/chat_models/test_baichuan.py index fe696a9df3970..1168f8dbeb3d6 100644 --- a/libs/langchain/tests/unit_tests/chat_models/test_baichuan.py +++ b/libs/langchain/tests/unit_tests/chat_models/test_baichuan.py @@ -1,6 +1,5 @@ import pytest -from langchain_core.pydantic_v1 import SecretStr -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, AIMessageChunk, ChatMessage, @@ -9,6 +8,7 @@ HumanMessageChunk, SystemMessage, ) +from langchain_core.pydantic_v1 import SecretStr from langchain.chat_models.baichuan import ( _convert_delta_to_message_chunk, diff --git a/libs/langchain/tests/unit_tests/chat_models/test_ernie.py b/libs/langchain/tests/unit_tests/chat_models/test_ernie.py index 472157246fcce..e9550388756d8 100644 --- a/libs/langchain/tests/unit_tests/chat_models/test_ernie.py +++ b/libs/langchain/tests/unit_tests/chat_models/test_ernie.py @@ -1,5 +1,5 @@ import pytest -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, FunctionMessage, HumanMessage, diff --git a/libs/langchain/tests/unit_tests/chat_models/test_google_palm.py b/libs/langchain/tests/unit_tests/chat_models/test_google_palm.py index d3e31d2b33d5e..71ac24968be57 100644 --- a/libs/langchain/tests/unit_tests/chat_models/test_google_palm.py +++ b/libs/langchain/tests/unit_tests/chat_models/test_google_palm.py @@ -1,7 +1,7 @@ """Test Google PaLM Chat API wrapper.""" import pytest -from langchain_core.schema.messages import AIMessage, HumanMessage, SystemMessage +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage from langchain.chat_models.google_palm import ( ChatGooglePalm, diff --git a/libs/langchain/tests/unit_tests/chat_models/test_hunyuan.py b/libs/langchain/tests/unit_tests/chat_models/test_hunyuan.py index e5b1ba6b0af38..d02fd441f673b 100644 --- a/libs/langchain/tests/unit_tests/chat_models/test_hunyuan.py +++ b/libs/langchain/tests/unit_tests/chat_models/test_hunyuan.py @@ -1,6 +1,5 @@ import pytest -from langchain_core.pydantic_v1 import SecretStr -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, AIMessageChunk, ChatMessage, @@ -9,6 +8,7 @@ HumanMessageChunk, SystemMessage, ) +from langchain_core.pydantic_v1 import SecretStr from langchain.chat_models.hunyuan import ( _convert_delta_to_message_chunk, diff --git a/libs/langchain/tests/unit_tests/chat_models/test_openai.py b/libs/langchain/tests/unit_tests/chat_models/test_openai.py index 2fe5cc94bd4b6..54b74e2341d0a 100644 --- a/libs/langchain/tests/unit_tests/chat_models/test_openai.py +++ b/libs/langchain/tests/unit_tests/chat_models/test_openai.py @@ -4,7 +4,7 @@ from unittest.mock import MagicMock, patch import pytest -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessage, FunctionMessage, HumanMessage, diff --git a/libs/langchain/tests/unit_tests/docstore/test_arbitrary_fn.py b/libs/langchain/tests/unit_tests/docstore/test_arbitrary_fn.py index 2de54d6839fac..f3646eb30d202 100644 --- a/libs/langchain/tests/unit_tests/docstore/test_arbitrary_fn.py +++ b/libs/langchain/tests/unit_tests/docstore/test_arbitrary_fn.py @@ -1,4 +1,4 @@ -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.docstore.arbitrary_fn import DocstoreFn diff 
--git a/libs/langchain/tests/unit_tests/document_loaders/parsers/test_generic.py b/libs/langchain/tests/unit_tests/document_loaders/parsers/test_generic.py index 121339e5b1df8..202e8df51d92d 100644 --- a/libs/langchain/tests/unit_tests/document_loaders/parsers/test_generic.py +++ b/libs/langchain/tests/unit_tests/document_loaders/parsers/test_generic.py @@ -3,7 +3,7 @@ from typing import Iterator import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.base import BaseBlobParser from langchain.document_loaders.blob_loaders import Blob diff --git a/libs/langchain/tests/unit_tests/document_loaders/test_base.py b/libs/langchain/tests/unit_tests/document_loaders/test_base.py index 77df9a031eb8a..cc3c049f9f3e9 100644 --- a/libs/langchain/tests/unit_tests/document_loaders/test_base.py +++ b/libs/langchain/tests/unit_tests/document_loaders/test_base.py @@ -1,7 +1,7 @@ """Test Base Schema of documents.""" from typing import Iterator -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.base import BaseBlobParser from langchain.document_loaders.blob_loaders import Blob diff --git a/libs/langchain/tests/unit_tests/document_loaders/test_generic_loader.py b/libs/langchain/tests/unit_tests/document_loaders/test_generic_loader.py index 72ba1c6edf61f..5603f87c1e54f 100644 --- a/libs/langchain/tests/unit_tests/document_loaders/test_generic_loader.py +++ b/libs/langchain/tests/unit_tests/document_loaders/test_generic_loader.py @@ -5,7 +5,7 @@ from typing import Generator, Iterator import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.document_loaders.base import BaseBlobParser from langchain.document_loaders.blob_loaders import Blob, FileSystemBlobLoader diff --git a/libs/langchain/tests/unit_tests/document_transformers/test_beautiful_soup_transformer.py b/libs/langchain/tests/unit_tests/document_transformers/test_beautiful_soup_transformer.py index 8996dbb341a86..252a052c7869b 100644 --- a/libs/langchain/tests/unit_tests/document_transformers/test_beautiful_soup_transformer.py +++ b/libs/langchain/tests/unit_tests/document_transformers/test_beautiful_soup_transformer.py @@ -1,6 +1,6 @@ """Unit tests for beautiful soup document transformer.""" import pytest -from langchain_core.schema.document import Document +from langchain_core.documents import Document from langchain.document_transformers import BeautifulSoupTransformer diff --git a/libs/langchain/tests/unit_tests/embeddings/test_caching.py b/libs/langchain/tests/unit_tests/embeddings/test_caching.py index 7da4258ca0a61..48c6adbab6600 100644 --- a/libs/langchain/tests/unit_tests/embeddings/test_caching.py +++ b/libs/langchain/tests/unit_tests/embeddings/test_caching.py @@ -2,7 +2,7 @@ from typing import List import pytest -from langchain_core.schema.embeddings import Embeddings +from langchain_core.embeddings import Embeddings from langchain.embeddings import CacheBackedEmbeddings from langchain.storage.in_memory import InMemoryStore diff --git a/libs/langchain/tests/unit_tests/evaluation/agents/test_eval_chain.py b/libs/langchain/tests/unit_tests/evaluation/agents/test_eval_chain.py index dff9f4b278e6f..660d52f8f743b 100644 --- a/libs/langchain/tests/unit_tests/evaluation/agents/test_eval_chain.py +++ b/libs/langchain/tests/unit_tests/evaluation/agents/test_eval_chain.py @@ -3,8 +3,9 @@ from typing import Any, Dict, List, Optional, Tuple 
import pytest +from langchain_core.agents import AgentAction, BaseMessage +from langchain_core.exceptions import OutputParserException from langchain_core.pydantic_v1 import Field -from langchain_core.schema import AgentAction, BaseMessage, OutputParserException from langchain.callbacks.manager import CallbackManagerForLLMRun from langchain.evaluation.agents.trajectory_eval_chain import ( diff --git a/libs/langchain/tests/unit_tests/indexes/test_hashed_document.py b/libs/langchain/tests/unit_tests/indexes/test_hashed_document.py index 1fe61e33936f9..2cf60e3a1a05a 100644 --- a/libs/langchain/tests/unit_tests/indexes/test_hashed_document.py +++ b/libs/langchain/tests/unit_tests/indexes/test_hashed_document.py @@ -1,5 +1,5 @@ import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.indexes._api import _HashedDocument diff --git a/libs/langchain/tests/unit_tests/indexes/test_indexing.py b/libs/langchain/tests/unit_tests/indexes/test_indexing.py index 1082927e79003..aacc138d95b63 100644 --- a/libs/langchain/tests/unit_tests/indexes/test_indexing.py +++ b/libs/langchain/tests/unit_tests/indexes/test_indexing.py @@ -14,8 +14,8 @@ import pytest import pytest_asyncio -from langchain_core.schema import Document -from langchain_core.schema.vectorstore import VST, VectorStore +from langchain_core.documents import Document +from langchain_core.vectorstores import VST, VectorStore import langchain.vectorstores from langchain.document_loaders.base import BaseLoader diff --git a/libs/langchain/tests/unit_tests/llms/fake_chat_model.py b/libs/langchain/tests/unit_tests/llms/fake_chat_model.py index dd79b830348fd..249401ec810eb 100644 --- a/libs/langchain/tests/unit_tests/llms/fake_chat_model.py +++ b/libs/langchain/tests/unit_tests/llms/fake_chat_model.py @@ -1,8 +1,8 @@ """Fake Chat Model wrapper for testing purposes.""" from typing import Any, Dict, List, Optional -from langchain_core.schema import ChatGeneration, ChatResult -from langchain_core.schema.messages import AIMessage, BaseMessage +from langchain_core.messages import AIMessage, BaseMessage +from langchain_core.outputs import ChatGeneration, ChatResult from langchain.callbacks.manager import ( AsyncCallbackManagerForLLMRun, diff --git a/libs/langchain/tests/unit_tests/llms/test_base.py b/libs/langchain/tests/unit_tests/llms/test_base.py index 2d5f0a880983f..c43e73bd117c2 100644 --- a/libs/langchain/tests/unit_tests/llms/test_base.py +++ b/libs/langchain/tests/unit_tests/llms/test_base.py @@ -6,7 +6,7 @@ except ImportError: from sqlalchemy.ext.declarative import declarative_base -from langchain_core.schema import Generation, LLMResult +from langchain_core.outputs import Generation, LLMResult from langchain.cache import InMemoryCache, SQLAlchemyCache from langchain.globals import get_llm_cache, set_llm_cache diff --git a/libs/langchain/tests/unit_tests/llms/test_callbacks.py b/libs/langchain/tests/unit_tests/llms/test_callbacks.py index 166c8528186f4..84e6b2b3bbf9e 100644 --- a/libs/langchain/tests/unit_tests/llms/test_callbacks.py +++ b/libs/langchain/tests/unit_tests/llms/test_callbacks.py @@ -1,5 +1,5 @@ """Test LLM callbacks.""" -from langchain_core.schema.messages import HumanMessage +from langchain_core.messages import HumanMessage from langchain.chat_models.fake import FakeListChatModel from langchain.llms.fake import FakeListLLM diff --git a/libs/langchain/tests/unit_tests/load/test_dump.py b/libs/langchain/tests/unit_tests/load/test_dump.py index 72f1135823d19..8e0dcd25fdc69 
100644 --- a/libs/langchain/tests/unit_tests/load/test_dump.py +++ b/libs/langchain/tests/unit_tests/load/test_dump.py @@ -7,8 +7,8 @@ from langchain_core.load.serializable import Serializable from langchain_core.prompts.chat import ChatPromptTemplate, HumanMessagePromptTemplate from langchain_core.prompts.prompt import PromptTemplate +from langchain_core.tracers.langchain import LangChainTracer -from langchain.callbacks.tracers.langchain import LangChainTracer from langchain.chains.llm import LLMChain from langchain.chat_models.openai import ChatOpenAI from langchain.llms.openai import OpenAI diff --git a/libs/langchain/tests/unit_tests/memory/chat_message_histories/test_file.py b/libs/langchain/tests/unit_tests/memory/chat_message_histories/test_file.py index 19b17a799d1bf..63d9e9988cbd8 100644 --- a/libs/langchain/tests/unit_tests/memory/chat_message_histories/test_file.py +++ b/libs/langchain/tests/unit_tests/memory/chat_message_histories/test_file.py @@ -3,7 +3,7 @@ from typing import Generator import pytest -from langchain_core.schema.messages import AIMessage, HumanMessage +from langchain_core.messages import AIMessage, HumanMessage from langchain.memory.chat_message_histories import FileChatMessageHistory diff --git a/libs/langchain/tests/unit_tests/memory/chat_message_histories/test_sql.py b/libs/langchain/tests/unit_tests/memory/chat_message_histories/test_sql.py index 5e451988a7fdf..9dc9107e7e973 100644 --- a/libs/langchain/tests/unit_tests/memory/chat_message_histories/test_sql.py +++ b/libs/langchain/tests/unit_tests/memory/chat_message_histories/test_sql.py @@ -2,7 +2,7 @@ from typing import Any, Generator, Tuple import pytest -from langchain_core.schema.messages import AIMessage, HumanMessage +from langchain_core.messages import AIMessage, HumanMessage from sqlalchemy import Column, Integer, Text from sqlalchemy.orm import DeclarativeBase diff --git a/libs/langchain/tests/unit_tests/memory/chat_message_histories/test_streamlit.py b/libs/langchain/tests/unit_tests/memory/chat_message_histories/test_streamlit.py index c2c88b9c72831..84a29bfc0132c 100644 --- a/libs/langchain/tests/unit_tests/memory/chat_message_histories/test_streamlit.py +++ b/libs/langchain/tests/unit_tests/memory/chat_message_histories/test_streamlit.py @@ -6,7 +6,7 @@ import streamlit as st from langchain.memory import ConversationBufferMemory from langchain.memory.chat_message_histories import StreamlitChatMessageHistory - from langchain_core.schema.messages import _message_to_dict + from langchain_core.messages import message_to_dict message_history = StreamlitChatMessageHistory() memory = ConversationBufferMemory(chat_memory=message_history, return_messages=True) @@ -25,7 +25,7 @@ # Write the output to st.code as a json blob for inspection messages = memory.chat_memory.messages - messages_json = json.dumps([_message_to_dict(msg) for msg in messages]) + messages_json = json.dumps([message_to_dict(msg) for msg in messages]) st.text(messages_json) """ diff --git a/libs/langchain/tests/unit_tests/output_parsers/test_enum_parser.py b/libs/langchain/tests/unit_tests/output_parsers/test_enum_parser.py index ee35e85bbbd29..57fe3e0717bce 100644 --- a/libs/langchain/tests/unit_tests/output_parsers/test_enum_parser.py +++ b/libs/langchain/tests/unit_tests/output_parsers/test_enum_parser.py @@ -1,6 +1,6 @@ from enum import Enum -from langchain_core.schema import OutputParserException +from langchain_core.exceptions import OutputParserException from langchain.output_parsers.enum import EnumOutputParser diff --git 
a/libs/langchain/tests/unit_tests/output_parsers/test_json.py b/libs/langchain/tests/unit_tests/output_parsers/test_json.py index d9bae651b62bb..52f59d9aaf0cc 100644 --- a/libs/langchain/tests/unit_tests/output_parsers/test_json.py +++ b/libs/langchain/tests/unit_tests/output_parsers/test_json.py @@ -2,7 +2,7 @@ from typing import Any, AsyncIterator, Iterator, Tuple import pytest -from langchain_core.schema.messages import AIMessageChunk +from langchain_core.messages import AIMessageChunk from langchain.output_parsers.json import ( SimpleJsonOutputParser, diff --git a/libs/langchain/tests/unit_tests/output_parsers/test_openai_functions.py b/libs/langchain/tests/unit_tests/output_parsers/test_openai_functions.py index a86696603dc0e..6af2be93c319a 100644 --- a/libs/langchain/tests/unit_tests/output_parsers/test_openai_functions.py +++ b/libs/langchain/tests/unit_tests/output_parsers/test_openai_functions.py @@ -1,8 +1,9 @@ from typing import Any, Dict import pytest -from langchain_core.schema import BaseMessage, ChatGeneration, OutputParserException -from langchain_core.schema.messages import AIMessage, HumanMessage +from langchain_core.exceptions import OutputParserException +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.outputs import ChatGeneration from langchain.output_parsers.openai_functions import ( JsonOutputFunctionsParser, diff --git a/libs/langchain/tests/unit_tests/output_parsers/test_pydantic_parser.py b/libs/langchain/tests/unit_tests/output_parsers/test_pydantic_parser.py index b7b693256cdae..50565292857e0 100644 --- a/libs/langchain/tests/unit_tests/output_parsers/test_pydantic_parser.py +++ b/libs/langchain/tests/unit_tests/output_parsers/test_pydantic_parser.py @@ -2,8 +2,8 @@ from enum import Enum from typing import Optional +from langchain_core.exceptions import OutputParserException from langchain_core.pydantic_v1 import BaseModel, Field -from langchain_core.schema import OutputParserException from langchain.output_parsers.pydantic import PydanticOutputParser diff --git a/libs/langchain/tests/unit_tests/output_parsers/test_structured_parser.py b/libs/langchain/tests/unit_tests/output_parsers/test_structured_parser.py index 9a59a6ea9e18b..8fec872eb26e3 100644 --- a/libs/langchain/tests/unit_tests/output_parsers/test_structured_parser.py +++ b/libs/langchain/tests/unit_tests/output_parsers/test_structured_parser.py @@ -1,4 +1,4 @@ -from langchain_core.schema import OutputParserException +from langchain_core.exceptions import OutputParserException from langchain.output_parsers import ResponseSchema, StructuredOutputParser diff --git a/libs/langchain/tests/unit_tests/retrievers/self_query/test_base.py b/libs/langchain/tests/unit_tests/retrievers/self_query/test_base.py index 8797b6a4c968e..b181e964de425 100644 --- a/libs/langchain/tests/unit_tests/retrievers/self_query/test_base.py +++ b/libs/langchain/tests/unit_tests/retrievers/self_query/test_base.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List, Tuple, Union import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/tests/unit_tests/retrievers/sequential_retriever.py b/libs/langchain/tests/unit_tests/retrievers/sequential_retriever.py index 41830958b0839..45f7a6934d8ab 100644 --- a/libs/langchain/tests/unit_tests/retrievers/sequential_retriever.py +++ b/libs/langchain/tests/unit_tests/retrievers/sequential_retriever.py 
@@ -1,6 +1,6 @@ from typing import List -from langchain_core.schema import BaseRetriever, Document +from langchain_core.retrievers import BaseRetriever, Document class SequentialRetriever(BaseRetriever): diff --git a/libs/langchain/tests/unit_tests/retrievers/test_base.py b/libs/langchain/tests/unit_tests/retrievers/test_base.py index f543a0d368ece..7b1dc2da2b0ef 100644 --- a/libs/langchain/tests/unit_tests/retrievers/test_base.py +++ b/libs/langchain/tests/unit_tests/retrievers/test_base.py @@ -5,7 +5,7 @@ from typing import Dict, List, Optional import pytest -from langchain_core.schema import BaseRetriever, Document +from langchain_core.retrievers import BaseRetriever, Document from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, diff --git a/libs/langchain/tests/unit_tests/retrievers/test_bm25.py b/libs/langchain/tests/unit_tests/retrievers/test_bm25.py index 1fd1512e8f534..3d788476e7f0f 100644 --- a/libs/langchain/tests/unit_tests/retrievers/test_bm25.py +++ b/libs/langchain/tests/unit_tests/retrievers/test_bm25.py @@ -1,5 +1,5 @@ import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.retrievers.bm25 import BM25Retriever diff --git a/libs/langchain/tests/unit_tests/retrievers/test_ensemble.py b/libs/langchain/tests/unit_tests/retrievers/test_ensemble.py index 231dbf035b771..a84ee1f26a734 100644 --- a/libs/langchain/tests/unit_tests/retrievers/test_ensemble.py +++ b/libs/langchain/tests/unit_tests/retrievers/test_ensemble.py @@ -1,5 +1,5 @@ import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.retrievers.bm25 import BM25Retriever from langchain.retrievers.ensemble import EnsembleRetriever diff --git a/libs/langchain/tests/unit_tests/retrievers/test_multi_query.py b/libs/langchain/tests/unit_tests/retrievers/test_multi_query.py index 59ba4463f5f5c..8f80e77e79b09 100644 --- a/libs/langchain/tests/unit_tests/retrievers/test_multi_query.py +++ b/libs/langchain/tests/unit_tests/retrievers/test_multi_query.py @@ -1,7 +1,7 @@ from typing import List import pytest as pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.retrievers.multi_query import _unique_documents diff --git a/libs/langchain/tests/unit_tests/retrievers/test_remote_retriever.py b/libs/langchain/tests/unit_tests/retrievers/test_remote_retriever.py index a77abf8fc17c0..2374d6e101c68 100644 --- a/libs/langchain/tests/unit_tests/retrievers/test_remote_retriever.py +++ b/libs/langchain/tests/unit_tests/retrievers/test_remote_retriever.py @@ -1,6 +1,6 @@ from typing import Any, Dict -from langchain_core.schema import Document +from langchain_core.documents import Document from pytest_mock import MockerFixture from langchain.retrievers import RemoteLangChainRetriever diff --git a/libs/langchain/tests/unit_tests/retrievers/test_svm.py b/libs/langchain/tests/unit_tests/retrievers/test_svm.py index 9379648b254e4..defdfce24c711 100644 --- a/libs/langchain/tests/unit_tests/retrievers/test_svm.py +++ b/libs/langchain/tests/unit_tests/retrievers/test_svm.py @@ -1,5 +1,5 @@ import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.embeddings import FakeEmbeddings from langchain.retrievers.svm import SVMRetriever diff --git a/libs/langchain/tests/unit_tests/retrievers/test_tfidf.py b/libs/langchain/tests/unit_tests/retrievers/test_tfidf.py index 
34a78caad4631..6b2428769a82f 100644 --- a/libs/langchain/tests/unit_tests/retrievers/test_tfidf.py +++ b/libs/langchain/tests/unit_tests/retrievers/test_tfidf.py @@ -3,7 +3,7 @@ from tempfile import TemporaryDirectory import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.retrievers.tfidf import TFIDFRetriever diff --git a/libs/langchain/tests/unit_tests/retrievers/test_time_weighted_retriever.py b/libs/langchain/tests/unit_tests/retrievers/test_time_weighted_retriever.py index 4fc2d42a13250..cfbf70de49da2 100644 --- a/libs/langchain/tests/unit_tests/retrievers/test_time_weighted_retriever.py +++ b/libs/langchain/tests/unit_tests/retrievers/test_time_weighted_retriever.py @@ -4,9 +4,9 @@ from typing import Any, Iterable, List, Optional, Tuple, Type import pytest -from langchain_core.schema import Document -from langchain_core.schema.embeddings import Embeddings -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.vectorstores import VectorStore from langchain.retrievers.time_weighted_retriever import ( TimeWeightedVectorStoreRetriever, diff --git a/libs/langchain/tests/unit_tests/retrievers/test_you.py b/libs/langchain/tests/unit_tests/retrievers/test_you.py index 703b5ddf9cd24..2682365d3ad40 100644 --- a/libs/langchain/tests/unit_tests/retrievers/test_you.py +++ b/libs/langchain/tests/unit_tests/retrievers/test_you.py @@ -2,7 +2,7 @@ import os from unittest import mock -from langchain_core.schema import Document +from langchain_core.documents import Document from requests import Response from langchain.retrievers.you import YouRetriever diff --git a/libs/langchain/tests/unit_tests/runnables/test_openai_functions.py b/libs/langchain/tests/unit_tests/runnables/test_openai_functions.py index 7c78fd360241a..27f6a4c0cfe99 100644 --- a/libs/langchain/tests/unit_tests/runnables/test_openai_functions.py +++ b/libs/langchain/tests/unit_tests/runnables/test_openai_functions.py @@ -1,8 +1,7 @@ from typing import Any, List, Optional -from langchain_core.schema import ChatResult -from langchain_core.schema.messages import AIMessage, BaseMessage -from langchain_core.schema.output import ChatGeneration +from langchain_core.messages import AIMessage, BaseMessage +from langchain_core.outputs import ChatGeneration, ChatResult from pytest_mock import MockerFixture from syrupy import SnapshotAssertion diff --git a/libs/langchain/tests/unit_tests/schema/test_imports.py b/libs/langchain/tests/unit_tests/schema/test_imports.py index 5bc2f228798ae..d667e3d4c1040 100644 --- a/libs/langchain/tests/unit_tests/schema/test_imports.py +++ b/libs/langchain/tests/unit_tests/schema/test_imports.py @@ -1,4 +1,4 @@ -from langchain_core.schema import __all__ +from langchain.schema import __all__ EXPECTED_ALL = [ "BaseCache", @@ -17,7 +17,7 @@ "SystemMessage", "messages_from_dict", "messages_to_dict", - "_message_to_dict", + "message_to_dict", "_message_from_dict", "get_buffer_string", "RunInfo", diff --git a/libs/langchain/tests/unit_tests/schema/test_messages.py b/libs/langchain/tests/unit_tests/schema/test_messages.py index 6cfe6d2649b27..37523b7c7595f 100644 --- a/libs/langchain/tests/unit_tests/schema/test_messages.py +++ b/libs/langchain/tests/unit_tests/schema/test_messages.py @@ -1,5 +1,5 @@ import pytest -from langchain_core.schema.messages import ( +from langchain_core.messages import ( AIMessageChunk, ChatMessageChunk, 
FunctionMessageChunk, diff --git a/libs/langchain/tests/unit_tests/schema/test_output.py b/libs/langchain/tests/unit_tests/schema/test_output.py index 5e086c5e5a363..a2f6e3e80e2b8 100644 --- a/libs/langchain/tests/unit_tests/schema/test_output.py +++ b/libs/langchain/tests/unit_tests/schema/test_output.py @@ -1,5 +1,5 @@ -from langchain_core.schema.messages import HumanMessageChunk -from langchain_core.schema.output import ChatGenerationChunk, GenerationChunk +from langchain_core.messages import HumanMessageChunk +from langchain_core.outputs import ChatGenerationChunk, GenerationChunk def test_generation_chunk() -> None: diff --git a/libs/langchain/tests/unit_tests/smith/evaluation/test_runner_utils.py b/libs/langchain/tests/unit_tests/smith/evaluation/test_runner_utils.py index 89ee06a7a9319..78e123a71f69a 100644 --- a/libs/langchain/tests/unit_tests/smith/evaluation/test_runner_utils.py +++ b/libs/langchain/tests/unit_tests/smith/evaluation/test_runner_utils.py @@ -6,7 +6,7 @@ import pytest from freezegun import freeze_time -from langchain_core.schema.language_model import BaseLanguageModel +from langchain_core.language_models import BaseLanguageModel from langsmith.client import Client from langsmith.schemas import Dataset, Example diff --git a/libs/langchain/tests/unit_tests/storage/test_lc_store.py b/libs/langchain/tests/unit_tests/storage/test_lc_store.py index 5b1eff98c82c4..4fa2fc7bbfe63 100644 --- a/libs/langchain/tests/unit_tests/storage/test_lc_store.py +++ b/libs/langchain/tests/unit_tests/storage/test_lc_store.py @@ -2,7 +2,7 @@ from typing import Generator, cast import pytest -from langchain_core.schema import Document +from langchain_core.documents import Document from langchain.storage._lc_store import create_kv_docstore, create_lc_store from langchain.storage.file_system import LocalFileStore diff --git a/libs/langchain/tests/unit_tests/test_cache.py b/libs/langchain/tests/unit_tests/test_cache.py index e975174765874..74e5b120217f5 100644 --- a/libs/langchain/tests/unit_tests/test_cache.py +++ b/libs/langchain/tests/unit_tests/test_cache.py @@ -4,11 +4,8 @@ import pytest from _pytest.fixtures import FixtureRequest from langchain_core.load import dumps -from langchain_core.schema import ( - ChatGeneration, - Generation, -) -from langchain_core.schema.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage +from langchain_core.outputs import ChatGeneration, Generation from sqlalchemy import create_engine from sqlalchemy.orm import Session diff --git a/libs/langchain/tests/unit_tests/test_dependencies.py b/libs/langchain/tests/unit_tests/test_dependencies.py index 05476df3651a2..adb9ae2649bc7 100644 --- a/libs/langchain/tests/unit_tests/test_dependencies.py +++ b/libs/langchain/tests/unit_tests/test_dependencies.py @@ -92,7 +92,7 @@ def test_test_group_dependencies(poetry_conf: Mapping[str, Any]) -> None: def test_imports() -> None: """Test that you can import all top level things okay.""" - from langchain_core.schema import BasePromptTemplate # noqa: F401 + from langchain_core.prompts import BasePromptTemplate # noqa: F401 from langchain.agents import OpenAIFunctionsAgent # noqa: F401 from langchain.callbacks import OpenAICallbackHandler # noqa: F401 diff --git a/libs/langchain/tests/unit_tests/test_schema.py b/libs/langchain/tests/unit_tests/test_schema.py index 7f833236339f2..0cb6bc92e196f 100644 --- a/libs/langchain/tests/unit_tests/test_schema.py +++ b/libs/langchain/tests/unit_tests/test_schema.py @@ 
-3,18 +3,9 @@ from typing import Union import pytest -from langchain_core.prompts.base import StringPromptValue -from langchain_core.prompts.chat import ChatPromptValueConcrete -from langchain_core.pydantic_v1 import BaseModel, ValidationError -from langchain_core.schema import ( - AgentAction, - AgentFinish, - ChatGeneration, - Document, - Generation, -) -from langchain_core.schema.agent import AgentActionMessageLog -from langchain_core.schema.messages import ( +from langchain_core.agents import AgentAction, AgentActionMessageLog, AgentFinish +from langchain_core.documents import Document +from langchain_core.messages import ( AIMessage, AIMessageChunk, ChatMessage, @@ -29,7 +20,9 @@ messages_from_dict, messages_to_dict, ) -from langchain_core.schema.output import ChatGenerationChunk +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, Generation +from langchain_core.prompts import ChatPromptValueConcrete, StringPromptValue +from langchain_core.pydantic_v1 import BaseModel, ValidationError class TestGetBufferString(unittest.TestCase): diff --git a/libs/langchain/tests/unit_tests/vectorstores/test_imports.py b/libs/langchain/tests/unit_tests/vectorstores/test_imports.py index 633dbad9957fd..f8dd5dc977376 100644 --- a/libs/langchain/tests/unit_tests/vectorstores/test_imports.py +++ b/libs/langchain/tests/unit_tests/vectorstores/test_imports.py @@ -1,4 +1,4 @@ -from langchain_core.schema.vectorstore import VectorStore +from langchain_core.vectorstores import VectorStore from langchain import vectorstores
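For orientation, here is a minimal sketch, separate from the diff above, of the post-refactor import locations that these test updates switch to (formerly under langchain_core.schema.*). Only the import paths come from the diff; the object names and constructor calls below are illustrative assumptions, not code from the PR.

# Illustrative only: exercises the new top-level langchain_core modules
# that the test diffs above migrate to.
from langchain_core.agents import AgentAction, AgentFinish
from langchain_core.documents import Document
from langchain_core.exceptions import OutputParserException
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.outputs import Generation, LLMResult
from langchain_core.prompts import PromptTemplate

# The objects keep their shapes; only the import paths changed.
doc = Document(page_content="foo", metadata={"source": "test"})
msgs = [HumanMessage(content="hello"), AIMessage(content="hi")]
action = AgentAction(tool="search", tool_input="query", log="calling search")
finish = AgentFinish(return_values={"output": "done"}, log="done")
result = LLMResult(generations=[[Generation(text="bar")]])
prompt = PromptTemplate.from_template("Say {word}")
err = OutputParserException("could not parse")

print(doc.page_content, action.tool, result.generations[0][0].text, prompt.format(word="hi"))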