diff --git a/docs/docs/expression_language/why.ipynb b/docs/docs/expression_language/why.ipynb index 3c0eaf0e57539..9ee71c4e77108 100644 --- a/docs/docs/expression_language/why.ipynb +++ b/docs/docs/expression_language/why.ipynb @@ -1007,7 +1007,7 @@ "from langchain_openai import OpenAI\n", "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.prompts import ChatPromptTemplate\n", - "from langchain_core.runnables import RunnablePassthrough\n", + "from langchain_core.runnables import RunnablePassthrough, ConfigurableField\n", "\n", "os.environ[\"LANGCHAIN_API_KEY\"] = \"...\"\n", "os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"\n", diff --git a/libs/community/langchain_community/cache.py b/libs/community/langchain_community/cache.py index 14af98fdfb7fc..0ad635092e8ad 100644 --- a/libs/community/langchain_community/cache.py +++ b/libs/community/langchain_community/cache.py @@ -56,7 +56,7 @@ from langchain_core.embeddings import Embeddings from langchain_core.language_models.llms import LLM, get_prompts from langchain_core.load.dump import dumps -from langchain_core.load.load import _loads_suppress_warning +from langchain_core.load.load import loads from langchain_core.outputs import ChatGeneration, Generation from langchain_core.utils import get_from_env @@ -149,10 +149,7 @@ def _loads_generations(generations_str: str) -> Union[RETURN_VAL_TYPE, None]: RETURN_VAL_TYPE: A list of generations. """ try: - generations = [ - _loads_suppress_warning(_item_str) - for _item_str in json.loads(generations_str) - ] + generations = [loads(_item_str) for _item_str in json.loads(generations_str)] return generations except (json.JSONDecodeError, TypeError): # deferring the (soft) handling to after the legacy-format attempt @@ -227,7 +224,7 @@ def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: rows = session.execute(stmt).fetchall() if rows: try: - return [_loads_suppress_warning(row[0]) for row in rows] + return [loads(row[0]) for row in rows] except Exception: logger.warning( "Retrieving a cache value that could not be deserialized " @@ -398,7 +395,7 @@ def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: if results: for _, text in results.items(): try: - generations.append(_loads_suppress_warning(text)) + generations.append(loads(text)) except Exception: logger.warning( "Retrieving a cache value that could not be deserialized " @@ -538,9 +535,7 @@ def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: if results: for document in results: try: - generations.extend( - _loads_suppress_warning(document.metadata["return_val"]) - ) + generations.extend(loads(document.metadata["return_val"])) except Exception: logger.warning( "Retrieving a cache value that could not be deserialized " @@ -1190,7 +1185,7 @@ def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: """Look up based on prompt and llm_string.""" rows = self._search_rows(prompt, llm_string) if rows: - return [_loads_suppress_warning(row[0]) for row in rows] + return [loads(row[0]) for row in rows] return None def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> None: diff --git a/libs/community/langchain_community/chat_loaders/langsmith.py b/libs/community/langchain_community/chat_loaders/langsmith.py index 03e5e9c75705e..4e769283dd2e4 100644 --- a/libs/community/langchain_community/chat_loaders/langsmith.py +++ b/libs/community/langchain_community/chat_loaders/langsmith.py @@ -4,7 +4,7 @@ from typing 
import TYPE_CHECKING, Dict, Iterable, Iterator, List, Optional, Union, cast from langchain_core.chat_sessions import ChatSession -from langchain_core.load.load import _load_suppress_warning +from langchain_core.load.load import load from langchain_community.chat_loaders.base import BaseChatLoader @@ -66,10 +66,8 @@ def _get_messages_from_llm_run(llm_run: "Run") -> ChatSession: raise ValueError(f"Run has no 'messages' inputs. Got {llm_run.inputs}") if not llm_run.outputs: raise ValueError("Cannot convert pending run") - messages = _load_suppress_warning(llm_run.inputs)["messages"] - message_chunk = _load_suppress_warning(llm_run.outputs)["generations"][0][ - "message" - ] + messages = load(llm_run.inputs)["messages"] + message_chunk = load(llm_run.outputs)["generations"][0]["message"] return ChatSession(messages=messages + [message_chunk]) @staticmethod diff --git a/libs/community/langchain_community/chat_models/anyscale.py b/libs/community/langchain_community/chat_models/anyscale.py index 2f734aa9b73a1..40d36aa187dcb 100644 --- a/libs/community/langchain_community/chat_models/anyscale.py +++ b/libs/community/langchain_community/chat_models/anyscale.py @@ -4,10 +4,9 @@ import logging import os import sys -from typing import TYPE_CHECKING, Any, Dict, Optional, Set +from typing import TYPE_CHECKING, Dict, Optional, Set import requests -from langchain_core._api.deprecation import suppress_langchain_deprecation_warning from langchain_core.messages import BaseMessage from langchain_core.pydantic_v1 import Field, SecretStr, root_validator from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env @@ -73,11 +72,6 @@ def is_lc_serializable(cls) -> bool: available_models: Optional[Set[str]] = None """Available models from Anyscale API.""" - def __init__(self, *kwargs: Any) -> None: - # bypass deprecation warning for ChatOpenAI - with suppress_langchain_deprecation_warning(): - super().__init__(*kwargs) - @staticmethod def get_available_models( anyscale_api_key: Optional[str] = None, diff --git a/libs/community/langchain_community/chat_models/everlyai.py b/libs/community/langchain_community/chat_models/everlyai.py index 5d3999308af0f..dca315d18995d 100644 --- a/libs/community/langchain_community/chat_models/everlyai.py +++ b/libs/community/langchain_community/chat_models/everlyai.py @@ -3,9 +3,8 @@ import logging import sys -from typing import TYPE_CHECKING, Any, Dict, Optional, Set +from typing import TYPE_CHECKING, Dict, Optional, Set -from langchain_core._api.deprecation import suppress_langchain_deprecation_warning from langchain_core.messages import BaseMessage from langchain_core.pydantic_v1 import Field, root_validator from langchain_core.utils import get_from_dict_or_env @@ -65,11 +64,6 @@ def is_lc_serializable(cls) -> bool: available_models: Optional[Set[str]] = None """Available models from EverlyAI API.""" - def __init__(self, *kwargs: Any) -> None: - # bypass deprecation warning for ChatOpenAI - with suppress_langchain_deprecation_warning(): - super().__init__(*kwargs) - @staticmethod def get_available_models() -> Set[str]: """Get available models from EverlyAI API.""" diff --git a/libs/community/langchain_community/chat_models/promptlayer_openai.py b/libs/community/langchain_community/chat_models/promptlayer_openai.py index 0ff2e9b439386..551655e4c7dc6 100644 --- a/libs/community/langchain_community/chat_models/promptlayer_openai.py +++ b/libs/community/langchain_community/chat_models/promptlayer_openai.py @@ -2,7 +2,6 @@ import datetime from typing import Any, Dict, 
List, Optional -from langchain_core._api.deprecation import suppress_langchain_deprecation_warning from langchain_core.callbacks import ( AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun, @@ -40,11 +39,6 @@ class PromptLayerChatOpenAI(ChatOpenAI): pl_tags: Optional[List[str]] return_pl_id: Optional[bool] = False - def __init__(self, *kwargs: Any) -> None: - # bypass deprecation warning for ChatOpenAI - with suppress_langchain_deprecation_warning(): - super().__init__(*kwargs) - @classmethod def is_lc_serializable(cls) -> bool: return False diff --git a/libs/community/langchain_community/llms/promptlayer_openai.py b/libs/community/langchain_community/llms/promptlayer_openai.py index 7f3396c8addff..cb904476e8ba3 100644 --- a/libs/community/langchain_community/llms/promptlayer_openai.py +++ b/libs/community/langchain_community/llms/promptlayer_openai.py @@ -1,7 +1,6 @@ import datetime from typing import Any, List, Optional -from langchain_core._api.deprecation import suppress_langchain_deprecation_warning from langchain_core.callbacks import ( AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun, @@ -38,11 +37,6 @@ class PromptLayerOpenAI(OpenAI): pl_tags: Optional[List[str]] return_pl_id: Optional[bool] = False - def __init__(self, *kwargs: Any) -> None: - # bypass deprecation warning for ChatOpenAI - with suppress_langchain_deprecation_warning(): - super().__init__(*kwargs) - @classmethod def is_lc_serializable(cls) -> bool: return False diff --git a/libs/community/poetry.lock b/libs/community/poetry.lock index e4f801d984cee..3a8956f5d6dfe 100644 --- a/libs/community/poetry.lock +++ b/libs/community/poetry.lock @@ -3885,7 +3885,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.1.7" +version = "0.1.8" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -9172,4 +9172,4 @@ extended-testing = ["aiosqlite", "aleph-alpha-client", "anthropic", "arxiv", "as [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "454e721d2b68b5769f70bf8deb0d43285d270e2ca8ae99dc72c773dfa827835b" +content-hash = "a3765004062afc80420d49d782e3e023aec1707bf35ae2f808d88a1cac53b694" diff --git a/libs/community/pyproject.toml b/libs/community/pyproject.toml index ec0f6088c3a11..7a2d51147e3de 100644 --- a/libs/community/pyproject.toml +++ b/libs/community/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain-community" -version = "0.0.9" +version = "0.0.10" description = "Community contributed LangChain integrations." 
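Note on the deletions above: each removed `__init__` override declared `*kwargs` (a single star), which collects positional arguments only, so constructing these classes with keyword arguments raised TypeError before the suppression context manager was ever reached. Removing the overrides therefore fixes keyword construction as a side effect, in addition to dropping the now-redundant warning suppression. A minimal, purely illustrative demo of the single-star pitfall (not LangChain code):

```python
# `*kwargs` here is a misleadingly named *positional* catch-all.
def takes_star(*kwargs):
    return kwargs

print(takes_star(1, 2))        # (1, 2) -- positional arguments are captured
try:
    takes_star(model="gpt-4")  # keyword arguments are rejected outright
except TypeError as err:
    print(err)                 # takes_star() got an unexpected keyword argument 'model'
```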
authors = [] license = "MIT" @@ -9,7 +9,7 @@ repository = "https://github.com/langchain-ai/langchain" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -langchain-core = ">=0.1.7,<0.2" +langchain-core = ">=0.1.8,<0.2" SQLAlchemy = ">=1.4,<3" requests = "^2" PyYAML = ">=5.3" diff --git a/libs/core/langchain_core/_api/beta_decorator.py b/libs/core/langchain_core/_api/beta_decorator.py index b82c2ee4a977a..9f4dd5b18191e 100644 --- a/libs/core/langchain_core/_api/beta_decorator.py +++ b/libs/core/langchain_core/_api/beta_decorator.py @@ -15,6 +15,8 @@ import warnings from typing import Any, Callable, Generator, Type, TypeVar +from langchain_core._api.internal import is_caller_internal + class LangChainBetaWarning(DeprecationWarning): """A class for issuing beta warnings for LangChain users.""" @@ -78,6 +80,34 @@ def beta( _addendum: str = addendum, ) -> T: """Implementation of the decorator returned by `beta`.""" + + def emit_warning() -> None: + """Emit the warning.""" + warn_beta( + message=_message, + name=_name, + obj_type=_obj_type, + addendum=_addendum, + ) + + warned = False + + def warning_emitting_wrapper(*args: Any, **kwargs: Any) -> Any: + """Wrapper for the original wrapped callable that emits a warning. + + Args: + *args: The positional arguments to the function. + **kwargs: The keyword arguments to the function. + + Returns: + The return value of the function being wrapped. + """ + nonlocal warned + if not warned and not is_caller_internal(): + warned = True + emit_warning() + return wrapped(*args, **kwargs) + if isinstance(obj, type): if not _obj_type: _obj_type = "class" @@ -85,14 +115,25 @@ def beta( _name = _name or obj.__name__ old_doc = obj.__doc__ - def finalize(wrapper: Callable[..., Any], new_doc: str) -> T: + def finalize(_: Any, new_doc: str) -> T: """Finalize the annotation of a class.""" try: obj.__doc__ = new_doc except AttributeError: # Can't set on some extension objects. pass + + def warn_if_direct_instance( + self: Any, *args: Any, **kwargs: Any + ) -> Any: + """Warn that the class is in beta.""" + nonlocal warned + if not warned and type(self) is obj and not is_caller_internal(): + warned = True + emit_warning() + return wrapped(self, *args, **kwargs) + obj.__init__ = functools.wraps(obj.__init__)( # type: ignore[misc] - wrapper + warn_if_direct_instance ) return obj @@ -155,28 +196,6 @@ def finalize( # type: ignore wrapper.__doc__ = new_doc return wrapper - def emit_warning() -> None: - """Emit the warning.""" - warn_beta( - message=_message, - name=_name, - obj_type=_obj_type, - addendum=_addendum, - ) - - def warning_emitting_wrapper(*args: Any, **kwargs: Any) -> Any: - """Wrapper for the original wrapped callable that emits a warning. - - Args: - *args: The positional arguments to the function. - **kwargs: The keyword arguments to the function. - - Returns: - The return value of the function being wrapped. 
- """ - emit_warning() - return wrapped(*args, **kwargs) - old_doc = inspect.cleandoc(old_doc or "").strip("\n") if not old_doc: diff --git a/libs/core/langchain_core/_api/deprecation.py b/libs/core/langchain_core/_api/deprecation.py index 6919504351df6..eee686a1811cf 100644 --- a/libs/core/langchain_core/_api/deprecation.py +++ b/libs/core/langchain_core/_api/deprecation.py @@ -16,6 +16,8 @@ import warnings from typing import Any, Callable, Generator, Type, TypeVar +from langchain_core._api.internal import is_caller_internal + class LangChainDeprecationWarning(DeprecationWarning): """A class for issuing deprecation warnings for LangChain users.""" @@ -107,6 +109,38 @@ def deprecate( _addendum: str = addendum, ) -> T: """Implementation of the decorator returned by `deprecated`.""" + + def emit_warning() -> None: + """Emit the warning.""" + warn_deprecated( + since, + message=_message, + name=_name, + alternative=_alternative, + pending=_pending, + obj_type=_obj_type, + addendum=_addendum, + removal=removal, + ) + + warned = False + + def warning_emitting_wrapper(*args: Any, **kwargs: Any) -> Any: + """Wrapper for the original wrapped callable that emits a warning. + + Args: + *args: The positional arguments to the function. + **kwargs: The keyword arguments to the function. + + Returns: + The return value of the function being wrapped. + """ + nonlocal warned + if not warned and not is_caller_internal(): + warned = True + emit_warning() + return wrapped(*args, **kwargs) + if isinstance(obj, type): if not _obj_type: _obj_type = "class" @@ -114,14 +148,25 @@ def deprecate( _name = _name or obj.__name__ old_doc = obj.__doc__ - def finalize(wrapper: Callable[..., Any], new_doc: str) -> T: + def finalize(_: Any, new_doc: str) -> T: """Finalize the deprecation of a class.""" try: obj.__doc__ = new_doc except AttributeError: # Can't set on some extension objects. pass + + def warn_if_direct_instance( + self: Any, *args: Any, **kwargs: Any + ) -> Any: + """Warn that the class is in beta.""" + nonlocal warned + if not warned and type(self) is obj and not is_caller_internal(): + warned = True + emit_warning() + return wrapped(self, *args, **kwargs) + obj.__init__ = functools.wraps(obj.__init__)( # type: ignore[misc] - wrapper + warn_if_direct_instance ) return obj @@ -184,32 +229,6 @@ def finalize( # type: ignore wrapper.__doc__ = new_doc return wrapper - def emit_warning() -> None: - """Emit the warning.""" - warn_deprecated( - since, - message=_message, - name=_name, - alternative=_alternative, - pending=_pending, - obj_type=_obj_type, - addendum=_addendum, - removal=removal, - ) - - def warning_emitting_wrapper(*args: Any, **kwargs: Any) -> Any: - """Wrapper for the original wrapped callable that emits a warning. - - Args: - *args: The positional arguments to the function. - **kwargs: The keyword arguments to the function. - - Returns: - The return value of the function being wrapped. 
- """ - emit_warning() - return wrapped(*args, **kwargs) - old_doc = inspect.cleandoc(old_doc or "").strip("\n") if not old_doc: diff --git a/libs/core/langchain_core/_api/internal.py b/libs/core/langchain_core/_api/internal.py new file mode 100644 index 0000000000000..0dca40ad668a8 --- /dev/null +++ b/libs/core/langchain_core/_api/internal.py @@ -0,0 +1,23 @@ +import inspect + + +def is_caller_internal(depth: int = 2) -> bool: + """Return whether the caller at `depth` of this function is internal.""" + try: + frame = inspect.currentframe() + except AttributeError: + return False + if frame is None: + return False + try: + for _ in range(depth): + frame = frame.f_back + if frame is None: + return False + caller_module = inspect.getmodule(frame) + if caller_module is None: + return False + caller_module_name = caller_module.__name__ + return caller_module_name.startswith("langchain") + finally: + del frame diff --git a/libs/core/langchain_core/load/load.py b/libs/core/langchain_core/load/load.py index 854032d9666ff..2b8cd4bb161ed 100644 --- a/libs/core/langchain_core/load/load.py +++ b/libs/core/langchain_core/load/load.py @@ -3,7 +3,7 @@ import os from typing import Any, Dict, List, Optional -from langchain_core._api import beta, suppress_langchain_beta_warning +from langchain_core._api import beta from langchain_core.load.mapping import ( OLD_PROMPT_TEMPLATE_FORMATS, SERIALIZABLE_MAPPING, @@ -125,16 +125,6 @@ def loads( return json.loads(text, object_hook=Reviver(secrets_map, valid_namespaces)) -def _loads_suppress_warning( - text: str, - *, - secrets_map: Optional[Dict[str, str]] = None, - valid_namespaces: Optional[List[str]] = None, -) -> Any: - with suppress_langchain_beta_warning(): - return loads(text, secrets_map=secrets_map, valid_namespaces=valid_namespaces) - - @beta() def load( obj: Any, @@ -166,13 +156,3 @@ def _load(obj: Any) -> Any: return obj return _load(obj) - - -def _load_suppress_warning( - obj: Any, - *, - secrets_map: Optional[Dict[str, str]] = None, - valid_namespaces: Optional[List[str]] = None, -) -> Any: - with suppress_langchain_beta_warning(): - return load(obj, secrets_map=secrets_map, valid_namespaces=valid_namespaces) diff --git a/libs/core/langchain_core/runnables/base.py b/libs/core/langchain_core/runnables/base.py index 8243b9df2e7eb..0bb5ddb11b953 100644 --- a/libs/core/langchain_core/runnables/base.py +++ b/libs/core/langchain_core/runnables/base.py @@ -1277,6 +1277,8 @@ async def _atransform_stream_with_config( """Helper method to transform an Async Iterator of Input values into an Async Iterator of Output values, with callbacks. 
Use this to implement `astream()` or `atransform()` in Runnable subclasses.""" + from langchain_core.tracers.log_stream import LogStreamCallbackHandler + # tee the input so we can iterate over it twice input_for_tracing, input_for_transform = atee(input, 2) # Start the input iterator to ensure the input runnable starts before this one @@ -1302,6 +1304,16 @@ async def _atransform_stream_with_config( context = copy_context() context.run(var_child_runnable_config.set, child_config) iterator = context.run(transformer, input_for_transform, **kwargs) # type: ignore[arg-type] + if stream_log := next( + ( + h + for h in run_manager.handlers + if isinstance(h, LogStreamCallbackHandler) + ), + None, + ): + # populates streamed_output in astream_log() output if needed + iterator = stream_log.tap_output_aiter(run_manager.run_id, iterator) try: while True: if accepts_context(asyncio.create_task): @@ -2733,6 +2745,7 @@ def __init__( ], ] ] = None, + name: Optional[str] = None, ) -> None: """Create a RunnableLambda from a callable, and async callable or both. @@ -2766,7 +2779,9 @@ def __init__( ) try: - if func_for_name.__name__ != "": + if name is not None: + self.name = name + elif func_for_name.__name__ != "": self.name = func_for_name.__name__ except AttributeError: pass @@ -3046,17 +3061,7 @@ async def f(*args, **kwargs): # type: ignore[no-untyped-def] def _config( self, config: Optional[RunnableConfig], callable: Callable[..., Any] ) -> RunnableConfig: - config = ensure_config(config) - - if config.get("run_name") is None: - try: - run_name = callable.__name__ - except AttributeError: - run_name = None - if run_name is not None: - return patch_config(config, run_name=run_name) - - return config + return ensure_config(config) def invoke( self, diff --git a/libs/core/langchain_core/runnables/history.py b/libs/core/langchain_core/runnables/history.py index 0dd35da8f431d..9f6ceb36735a2 100644 --- a/libs/core/langchain_core/runnables/history.py +++ b/libs/core/langchain_core/runnables/history.py @@ -14,7 +14,7 @@ ) from langchain_core.chat_history import BaseChatMessageHistory -from langchain_core.load.load import _load_suppress_warning +from langchain_core.load.load import load from langchain_core.pydantic_v1 import BaseModel, create_model from langchain_core.runnables.base import Runnable, RunnableBindingBase, RunnableLambda from langchain_core.runnables.config import run_in_executor @@ -337,7 +337,7 @@ def _exit_history(self, run: Run, config: RunnableConfig) -> None: hist = config["configurable"]["message_history"] # Get the input messages - inputs = _load_suppress_warning(run.inputs) + inputs = load(run.inputs) input_val = inputs[self.input_messages_key or "input"] input_messages = self._get_input_messages(input_val) @@ -348,7 +348,7 @@ def _exit_history(self, run: Run, config: RunnableConfig) -> None: input_messages = input_messages[len(historic_messages) :] # Get the output messages - output_val = _load_suppress_warning(run.outputs) + output_val = load(run.outputs) output_messages = self._get_output_messages(output_val) for m in input_messages + output_messages: diff --git a/libs/core/langchain_core/tracers/log_stream.py b/libs/core/langchain_core/tracers/log_stream.py index 6606dc428ff4c..9287ce8875ef2 100644 --- a/libs/core/langchain_core/tracers/log_stream.py +++ b/libs/core/langchain_core/tracers/log_stream.py @@ -12,6 +12,7 @@ Optional, Sequence, TypedDict, + TypeVar, Union, ) from uuid import UUID @@ -19,7 +20,7 @@ import jsonpatch # type: ignore[import] from anyio import 
create_memory_object_stream -from langchain_core.load.load import _load_suppress_warning +from langchain_core.load.load import load from langchain_core.outputs import ChatGenerationChunk, GenerationChunk from langchain_core.tracers.base import BaseTracer from langchain_core.tracers.schemas import Run @@ -128,6 +129,9 @@ def __repr__(self) -> str: return f"RunLog({pformat(self.state)})" +T = TypeVar("T") + + class LogStreamCallbackHandler(BaseTracer): """A tracer that streams run logs to a stream.""" @@ -165,6 +169,28 @@ def __init__( def __aiter__(self) -> AsyncIterator[RunLogPatch]: return self.receive_stream.__aiter__() + async def tap_output_aiter( + self, run_id: UUID, output: AsyncIterator[T] + ) -> AsyncIterator[T]: + """Tap an output async iterator to stream its values to the log.""" + async for chunk in output: + # root run is handled in .astream_log() + if run_id != self.root_id: + # if we can't find the run silently ignore + # eg. because this run wasn't included in the log + if key := self._key_map_by_run_id.get(run_id): + self.send_stream.send_nowait( + RunLogPatch( + { + "op": "add", + "path": f"/logs/{key}/streamed_output/-", + "value": chunk, + } + ) + ) + + yield chunk + def include_run(self, run: Run) -> bool: if run.id == self.root_id: return False @@ -267,7 +293,7 @@ def _on_run_update(self, run: Run) -> None: "op": "add", "path": f"/logs/{index}/final_output", # to undo the dumpd done by some runnables / tracer / etc - "value": _load_suppress_warning(run.outputs), + "value": load(run.outputs), }, { "op": "add", diff --git a/libs/core/pyproject.toml b/libs/core/pyproject.toml index 946e2812de72a..b93f939618c87 100644 --- a/libs/core/pyproject.toml +++ b/libs/core/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain-core" -version = "0.1.7" +version = "0.1.8" description = "Building applications with LLMs through composability" authors = [] license = "MIT" diff --git a/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr b/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr index a6d1bdc02a181..8ff1fe98dbb91 100644 --- a/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr +++ b/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr @@ -472,7 +472,7 @@ # --- # name: test_combining_sequences.3 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 
'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, {'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': "RunnableLambda(lambda x: {'question': x[0] + x[1]})"}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nicer assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['baz, qux'])"}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, 'name': None}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'What is your name?'}, outputs={'output': ['baz', 'qux']}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 
1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000001'), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={'invocation_params': {'responses': ['foo, bar'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo, bar', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': 'foo, bar'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000002'), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'input': AIMessage(content='foo, bar')}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000003'), Run(id=UUID('00000000-0000-4000-8000-000000000004'), name='', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 
'runnables', 'base', 'RunnableLambda'], 'repr': "RunnableLambda(lambda x: {'question': x[0] + x[1]})"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'input': ['foo', 'bar']}, outputs={'question': 'foobar'}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:4'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000004'), Run(id=UUID('00000000-0000-4000-8000-000000000005'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nicer assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'foobar'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nicer assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'foobar', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:5'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000005'), Run(id=UUID('00000000-0000-4000-8000-000000000006'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={'invocation_params': {'responses': ['baz, qux'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['baz, qux'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, 
{'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'prompts': ['System: You are a nicer assistant.\nHuman: foobar']}, outputs={'generations': [[{'text': 'baz, qux', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': 'baz, qux'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:6'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000006'), Run(id=UUID('00000000-0000-4000-8000-000000000007'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'input': AIMessage(content='baz, qux')}, outputs={'output': ['baz', 'qux']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:7'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000007')], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000'), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, {'lc': 1, 'type': 
'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': "RunnableLambda(lambda x: {'question': x[0] + x[1]})"}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nicer assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['baz, qux'])"}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, 'name': None}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'What is your name?'}, outputs={'output': ['baz', 'qux']}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, 
parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000001'), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={'invocation_params': {'responses': ['foo, bar'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo, bar', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': 'foo, bar'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000002'), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'input': AIMessage(content='foo, bar')}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000003'), Run(id=UUID('00000000-0000-4000-8000-000000000004'), name='RunnableLambda', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': "RunnableLambda(lambda x: {'question': x[0] + x[1]})"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'input': ['foo', 'bar']}, outputs={'question': 'foobar'}, 
reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:4'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000004'), Run(id=UUID('00000000-0000-4000-8000-000000000005'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nicer assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'foobar'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nicer assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'foobar', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:5'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000005'), Run(id=UUID('00000000-0000-4000-8000-000000000006'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={'invocation_params': {'responses': ['baz, qux'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['baz, qux'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'prompts': ['System: You are a nicer assistant.\nHuman: foobar']}, outputs={'generations': [[{'text': 'baz, qux', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 
'AIMessage'], 'kwargs': {'content': 'baz, qux'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:6'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000006'), Run(id=UUID('00000000-0000-4000-8000-000000000007'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'input': AIMessage(content='baz, qux')}, outputs={'output': ['baz', 'qux']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:7'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000007')], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000'), ]) # --- # name: test_each @@ -1562,6 +1562,134 @@ Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}], 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': 'RunnableLambda(afunc=passthrough)'}, 'name': None}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'What is your name?'}, outputs={'output': 'foo'}, reference_example_id=None, 
parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000001'), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), 
dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000002'), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='passthrough', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': 'RunnableLambda(afunc=passthrough)'}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'input': 'foo'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000003')], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000'),
   ])
 # ---
+# name: test_prompt_with_llm_parser
+  '''
+  {
+    "lc": 1,
+    "type": "constructor",
+    "id": [
+      "langchain",
+      "schema",
+      "runnable",
+      "RunnableSequence"
+    ],
+    "kwargs": {
+      "first": {
+        "lc": 1,
+        "type": "constructor",
+        "id": [
+          "langchain",
+          "prompts",
+          "chat",
+          "ChatPromptTemplate"
+        ],
+        "kwargs": {
+          "messages": [
+            {
+              "lc": 1,
+              "type": "constructor",
+              "id": [
+                "langchain",
+                "prompts",
+                "chat",
+                "SystemMessagePromptTemplate"
+              ],
+              "kwargs": {
+                "prompt": {
+                  "lc": 1,
+                  "type": "constructor",
+                  "id": [
+                    "langchain",
+                    "prompts",
+                    "prompt",
+                    "PromptTemplate"
+                  ],
+                  "kwargs": {
+                    "input_variables": [],
+                    "template": "You are a nice assistant.",
+                    "template_format": "f-string",
+                    "partial_variables": {}
+                  }
+                }
+              }
+            },
+            {
+              "lc": 1,
+              "type": "constructor",
+              "id": [
+                "langchain",
+                "prompts",
+                "chat",
+                "HumanMessagePromptTemplate"
+              ],
+              "kwargs": {
+                "prompt": {
+                  "lc": 1,
+                  "type": "constructor",
+                  "id": [
+                    "langchain",
+                    "prompts",
+                    "prompt",
+                    "PromptTemplate"
+                  ],
+                  "kwargs": {
+                    "input_variables": [
+                      "question"
+                    ],
+                    "template": "{question}",
+                    "template_format": "f-string",
+                    "partial_variables": {}
+                  }
+                }
+              }
+            }
+          ],
+          "input_variables": [
+            "question"
+          ]
+        }
+      },
+      "middle": [
+        {
+          "lc": 1,
+          "type": "not_implemented",
+          "id": [
+            "tests",
+            "unit_tests",
+            "fake",
+            "llm",
+            "FakeStreamingListLLM"
+          ],
+          "repr": "FakeStreamingListLLM(responses=['bear, dog, cat', 'tomato, lettuce, onion'])"
+        }
+      ],
+      "last": {
+        "lc": 1,
+        "type": "constructor",
+        "id": [
+          "langchain",
+          "output_parsers",
+          "list",
+          "CommaSeparatedListOutputParser"
+        ],
+        "kwargs": {}
+      },
+      "name": null
+    }
+  }
+  '''
+# ---
+# name: test_prompt_with_llm_parser.1
+  list([
+    Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id':
['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeStreamingListLLM'], 'repr': "FakeStreamingListLLM(responses=['bear, dog, cat', 'tomato, lettuce, onion'])"}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, 'name': None}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'What is your name?'}, outputs={'output': ['bear', 'dog', 'cat']}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), 
dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000001'), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeStreamingListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={'invocation_params': {'responses': ['bear, dog, cat', 'tomato, lettuce, onion'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeStreamingListLLM'], 'repr': "FakeStreamingListLLM(responses=['bear, dog, cat', 'tomato, lettuce, onion'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'bear, dog, cat', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000002'), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'input': 'bear, dog, cat'}, outputs={'output': ['bear', 'dog', 'cat']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000003')], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000'), + ]) +# --- +# name: test_prompt_with_llm_parser.2 + list([ + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You 
are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeStreamingListLLM'], 'repr': "FakeStreamingListLLM(responses=['bear, dog, cat', 'tomato, lettuce, onion'], i=1)"}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, 'name': None}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'What is your name?'}, outputs={'output': ['tomato', 'lettuce', 'onion']}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000001'), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeStreamingListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0, 
tzinfo=datetime.timezone.utc), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={'invocation_params': {'responses': ['bear, dog, cat', 'tomato, lettuce, onion'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}, 'batch_size': 2}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeStreamingListLLM'], 'repr': "FakeStreamingListLLM(responses=['bear, dog, cat', 'tomato, lettuce, onion'], i=1)"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'tomato, lettuce, onion', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000002'), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'input': 'tomato, lettuce, onion'}, outputs={'output': ['tomato', 'lettuce', 'onion']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000.20230101T000000000000Z00000000-0000-4000-8000-000000000003')], trace_id=UUID('00000000-0000-4000-8000-000000000000'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000000'), + Run(id=UUID('00000000-0000-4000-8000-000000000004'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 
'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeStreamingListLLM'], 'repr': "FakeStreamingListLLM(responses=['bear, dog, cat', 'tomato, lettuce, onion'], i=1)"}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, 'name': None}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'What is your favorite color?'}, outputs={'output': ['bear', 'dog', 'cat']}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000005'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'question': 'What is your favorite color?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your favorite color?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000004'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000004'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000004.20230101T000000000000Z00000000-0000-4000-8000-000000000005'), Run(id=UUID('00000000-0000-4000-8000-000000000006'), name='FakeStreamingListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={'invocation_params': {'responses': ['bear, dog, cat', 'tomato, lettuce, onion'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}, 
'batch_size': 2}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeStreamingListLLM'], 'repr': "FakeStreamingListLLM(responses=['bear, dog, cat', 'tomato, lettuce, onion'], i=1)"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your favorite color?']}, outputs={'generations': [[{'text': 'bear, dog, cat', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000004'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000004'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000004.20230101T000000000000Z00000000-0000-4000-8000-000000000006'), Run(id=UUID('00000000-0000-4000-8000-000000000007'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)}], inputs={'input': 'bear, dog, cat'}, outputs={'output': ['bear', 'dog', 'cat']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000004'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[], trace_id=UUID('00000000-0000-4000-8000-000000000004'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000004.20230101T000000000000Z00000000-0000-4000-8000-000000000007')], trace_id=UUID('00000000-0000-4000-8000-000000000004'), dotted_order='20230101T000000000000Z00000000-0000-4000-8000-000000000004'),
+  ])
+# ---
 # name: test_router_runnable
   '''
   {
diff --git a/libs/core/tests/unit_tests/runnables/test_runnable.py b/libs/core/tests/unit_tests/runnables/test_runnable.py
index 6fb3eee4a3c68..7a8585e40186f 100644
--- a/libs/core/tests/unit_tests/runnables/test_runnable.py
+++ b/libs/core/tests/unit_tests/runnables/test_runnable.py
@@ -2140,6 +2140,272 @@ async def test_prompt_with_llm(
     assert stream_log == expected
 
+
+@freeze_time("2023-01-01")
+async def test_prompt_with_llm_parser(
+    mocker: MockerFixture, snapshot: SnapshotAssertion
+) -> None:
+    prompt = (
+        SystemMessagePromptTemplate.from_template("You are a nice assistant.")
+        + "{question}"
+    )
+    llm = FakeStreamingListLLM(responses=["bear, dog, cat", "tomato, lettuce, onion"])
+    parser = CommaSeparatedListOutputParser()
+
+    chain: Runnable = prompt | llm | parser
+
+    assert isinstance(chain, RunnableSequence)
+    assert chain.first == prompt
+    assert chain.middle == [llm]
+    assert chain.last == parser
+    assert dumps(chain, pretty=True) == snapshot
+
+    # Test invoke
+    prompt_spy = mocker.spy(prompt.__class__, "ainvoke")
+    llm_spy = mocker.spy(llm.__class__, "ainvoke")
+    parser_spy = mocker.spy(parser.__class__, "ainvoke")
+    tracer = FakeTracer()
+    assert await chain.ainvoke(
+        {"question": "What is your name?"}, dict(callbacks=[tracer])
+    ) == ["bear", "dog", "cat"]
+    assert prompt_spy.call_args.args[1] == {"question": "What is your name?"}
+    assert llm_spy.call_args.args[1] == ChatPromptValue(
+        messages=[
+            SystemMessage(content="You are a nice assistant."),
+            HumanMessage(content="What is your name?"),
+        ]
+    )
+    assert parser_spy.call_args.args[1] == "bear, dog, cat"
+    assert tracer.runs == snapshot
+    mocker.stop(prompt_spy)
+    mocker.stop(llm_spy)
+    mocker.stop(parser_spy)
+
+    # Test batch
+    prompt_spy = mocker.spy(prompt.__class__, "abatch")
+    llm_spy = mocker.spy(llm.__class__, "abatch")
+    parser_spy = mocker.spy(parser.__class__, "abatch")
+    tracer = FakeTracer()
+    assert await chain.abatch(
+        [
+            {"question": "What is your name?"},
+            {"question": "What is your favorite color?"},
+        ],
+        dict(callbacks=[tracer]),
+    ) == [["tomato", "lettuce", "onion"], ["bear", "dog", "cat"]]
+    assert prompt_spy.call_args.args[1] == [
+        {"question": "What is your name?"},
+        {"question": "What is your favorite color?"},
+    ]
+    assert llm_spy.call_args.args[1] == [
+        ChatPromptValue(
+            messages=[
+                SystemMessage(content="You are a nice assistant."),
+                HumanMessage(content="What is your name?"),
+            ]
+        ),
+        ChatPromptValue(
+            messages=[
+                SystemMessage(content="You are a nice assistant."),
+                HumanMessage(content="What is your favorite color?"),
+            ]
+        ),
+    ]
+    assert parser_spy.call_args.args[1] == [
+        "tomato, lettuce, onion",
+        "bear, dog, cat",
+    ]
+    assert tracer.runs == snapshot
+    mocker.stop(prompt_spy)
+    mocker.stop(llm_spy)
+    mocker.stop(parser_spy)
+
+    # Test stream
+    prompt_spy = mocker.spy(prompt.__class__, "ainvoke")
+    llm_spy = mocker.spy(llm.__class__, "astream")
+    tracer = FakeTracer()
+    assert [
+        token
+        async for token in chain.astream(
+            {"question": "What is your name?"}, dict(callbacks=[tracer])
+        )
+    ] == [["tomato"], ["lettuce"], ["onion"]]
+    assert prompt_spy.call_args.args[1] == {"question": "What is your name?"}
+    assert llm_spy.call_args.args[1] == ChatPromptValue(
+        messages=[
+            SystemMessage(content="You are a nice assistant."),
+            HumanMessage(content="What is your name?"),
+        ]
+    )
+
+    prompt_spy.reset_mock()
+    llm_spy.reset_mock()
+    stream_log = [
+        part async for part in chain.astream_log({"question": "What is your name?"})
+    ]
+
+    # remove ids from logs
+    for part in stream_log:
+        for op in part.ops:
+            if (
+                isinstance(op["value"], dict)
+                and "id" in op["value"]
+                and not isinstance(op["value"]["id"], list)  # serialized lc id
+            ):
+                del op["value"]["id"]
+
+    expected = [
+        RunLogPatch(
+            {
+                "op": "replace",
+                "path": "",
+                "value": {
+                    "logs": {},
+                    "final_output": None,
+                    "streamed_output": [],
+                },
+            }
+        ),
+        RunLogPatch(
+            {
+                "op": "add",
+                "path": "/logs/ChatPromptTemplate",
+                "value": {
+                    "end_time": None,
+                    "final_output": None,
+                    "metadata": {},
+                    "name": "ChatPromptTemplate",
+                    "start_time": "2023-01-01T00:00:00.000+00:00",
+                    "streamed_output": [],
+                    "streamed_output_str": [],
+                    "tags": ["seq:step:1"],
+                    "type": "prompt",
+                },
+            }
+        ),
+        RunLogPatch(
+            {
+                "op": "add",
+                "path": "/logs/ChatPromptTemplate/final_output",
+                "value": ChatPromptValue(
+                    messages=[
+                        SystemMessage(content="You are a nice assistant."),
+                        HumanMessage(content="What is your name?"),
+                    ]
+                ),
+            },
+            {
+                "op": "add",
+                "path": "/logs/ChatPromptTemplate/end_time",
+                "value": "2023-01-01T00:00:00.000+00:00",
+            },
+        ),
+        RunLogPatch(
+            {
+                "op": "add",
+                "path": "/logs/FakeStreamingListLLM",
+                "value": {
+                    "end_time": None,
+                    "final_output": None,
+                    "metadata": {},
+                    "name": "FakeStreamingListLLM",
+                    "start_time": "2023-01-01T00:00:00.000+00:00",
+                    "streamed_output": [],
+                    "streamed_output_str": [],
+                    "tags": ["seq:step:2"],
+                    "type": "llm",
+                },
+            }
+        ),
+        RunLogPatch(
+            {
+                "op": "add",
+                "path": "/logs/FakeStreamingListLLM/final_output",
+                "value": {
+                    "generations": [
+                        [
+                            {
+                                "generation_info": None,
+                                "text": "bear, dog, cat",
+                                "type": "Generation",
+                            }
+                        ]
+                    ],
+                    "llm_output": None,
+                    "run": None,
+                },
+            },
+            {
+                "op": "add",
+                "path": "/logs/FakeStreamingListLLM/end_time",
+                "value": "2023-01-01T00:00:00.000+00:00",
+            },
+        ),
+        RunLogPatch(
+            {
+                "op": "add",
+                "path": "/logs/CommaSeparatedListOutputParser",
+                "value": {
+                    "end_time": None,
+                    "final_output": None,
+                    "metadata": {},
+                    "name": "CommaSeparatedListOutputParser",
+                    "start_time": "2023-01-01T00:00:00.000+00:00",
+                    "streamed_output": [],
+                    "streamed_output_str": [],
+                    "tags": ["seq:step:3"],
+                    "type": "parser",
+                },
+            }
+        ),
+        RunLogPatch(
+            {
+                "op": "add",
+                "path": "/logs/CommaSeparatedListOutputParser/streamed_output/-",
+                "value": ["bear"],
+            }
+        ),
+        RunLogPatch(
+            {"op": "add", "path": "/streamed_output/-", "value": ["bear"]},
+            {"op": "replace", "path": "/final_output", "value": ["bear"]},
+        ),
+        RunLogPatch(
+            {
+                "op": "add",
+                "path": "/logs/CommaSeparatedListOutputParser/streamed_output/-",
+                "value": ["dog"],
+            }
+        ),
+        RunLogPatch(
+            {"op": "add", "path": "/streamed_output/-", "value": ["dog"]},
+            {"op": "add", "path": "/final_output/1", "value": "dog"},
+        ),
+        RunLogPatch(
+            {
+                "op": "add",
+                "path": "/logs/CommaSeparatedListOutputParser/streamed_output/-",
+                "value": ["cat"],
+            }
+        ),
+        RunLogPatch(
+            {"op": "add", "path": "/streamed_output/-", "value": ["cat"]},
+            {"op": "add", "path": "/final_output/2", "value": "cat"},
+        ),
+        RunLogPatch(
+            {
+                "op": "add",
+                "path": "/logs/CommaSeparatedListOutputParser/final_output",
+                "value": {"output": ["bear", "dog", "cat"]},
+            },
+            {
+                "op": "add",
+                "path": "/logs/CommaSeparatedListOutputParser/end_time",
+                "value": "2023-01-01T00:00:00.000+00:00",
+            },
+        ),
+    ]
+    assert stream_log == expected
+
+
 @freeze_time("2023-01-01")
 async def test_stream_log_retriever() -> None:
     prompt = (
@@ -4606,6 +4872,14 @@ async def agen(input: str) -> AsyncIterator[int]:
     assert [r.inputs["input"] for r in tracer.runs[0].child_runs] == ["a", "aa", "aaa"]
     assert [(r.outputs or {})["output"] for r in tracer.runs[0].child_runs] == [1, 2, 3]
 
+    tracer = FakeTracer()
+    assert [p async for p in agen.astream_log("a", {"callbacks": [tracer]})]
+    assert len(tracer.runs) == 1
+    assert tracer.runs[0].outputs == {"output": 6}
+    assert len(tracer.runs[0].child_runs) == 3
+    assert [r.inputs["input"] for r in tracer.runs[0].child_runs] == ["a", "aa", "aaa"]
+    assert [(r.outputs or {})["output"] for r in tracer.runs[0].child_runs] == [1, 2, 3]
+
     tracer = FakeTracer()
     assert await agen.abatch(["a", "a"], {"callbacks": [tracer]}) == [6, 6]
     assert len(tracer.runs) == 2