diff --git a/libs/partners/prompty/poetry.lock b/libs/partners/prompty/poetry.lock
index 73ebd1174e5de..16ba1be41323b 100644
--- a/libs/partners/prompty/poetry.lock
+++ b/libs/partners/prompty/poetry.lock
@@ -148,9 +148,6 @@ files = [
     {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
 ]
 
-[package.dependencies]
-typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
-
 [[package]]
 name = "anyio"
 version = "4.4.0"
@@ -632,7 +629,7 @@ files = [
 
 [[package]]
 name = "langchain"
-version = "0.2.15"
+version = "0.2.16"
 description = "Building applications with LLMs through composability"
 optional = false
 python-versions = ">=3.8.1,<4.0"
@@ -642,7 +639,7 @@ develop = true
 [package.dependencies]
 aiohttp = "^3.8.3"
 async-timeout = {version = "^4.0.0", markers = "python_version < \"3.11\""}
-langchain-core = "^0.2.35"
+langchain-core = "^0.2.38"
 langchain-text-splitters = "^0.2.0"
 langsmith = "^0.1.17"
 numpy = [
@@ -661,10 +658,10 @@ url = "../../langchain"
 
 [[package]]
 name = "langchain-core"
-version = "0.2.36"
+version = "0.2.38"
 description = "Building applications with LLMs through composability"
 optional = false
-python-versions = ">=3.8.1,<4.0"
+python-versions = ">=3.9,<4.0"
 files = []
 develop = true
 
@@ -686,7 +683,7 @@ url = "../../core"
 
 [[package]]
 name = "langchain-text-splitters"
-version = "0.2.3"
+version = "0.2.4"
 description = "LangChain text splitting utilities"
 optional = false
 python-versions = ">=3.8.1,<4.0"
@@ -694,7 +691,7 @@ files = []
 develop = true
 
 [package.dependencies]
-langchain-core = "^0.2.10"
+langchain-core = "^0.2.38"
 
 [package.source]
 type = "directory"
@@ -880,43 +877,6 @@ files = [
     {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
 ]
 
-[[package]]
-name = "numpy"
-version = "1.24.4"
-description = "Fundamental package for array computing in Python"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"},
-    {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"},
-    {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"},
-    {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"},
-    {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"},
-    {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"},
-    {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"},
-    {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"},
-    {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"},
-    {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"},
-    {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"},
-    {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"},
-    {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"},
-    {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"},
-    {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"},
-    {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"},
-    {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"},
-    {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"},
-    {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"},
-    {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"},
-    {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"},
-    {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"},
-    {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"},
-    {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"},
-    {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"},
-    {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"},
-    {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"},
-    {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"},
-]
-
 [[package]]
 name = "numpy"
 version = "1.26.4"
@@ -1712,5 +1672,5 @@ multidict = ">=4.0"
 
 [metadata]
 lock-version = "2.0"
-python-versions = ">=3.8.1,<4.0"
-content-hash = "a8ac2ae4de4a64bd1b7bc8a36af02073196a3261f663f6da52a2e82818931064"
+python-versions = ">=3.9,<4.0"
+content-hash = "66290ecf222b184bd4ec2aff94d26b9e316d1776f1a3558173f174411905c062"
diff --git a/libs/partners/prompty/pyproject.toml b/libs/partners/prompty/pyproject.toml
index 7ae1504e48cd1..6bf999298f3b8 100644
--- a/libs/partners/prompty/pyproject.toml
+++ b/libs/partners/prompty/pyproject.toml
@@ -12,10 +12,11 @@ license = "MIT"
 "Release Notes" = "https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain-prompty%3D%3D0%22&expanded=true"
 
 [tool.poetry.dependencies]
-python = ">=3.8.1,<4.0"
+python = ">=3.9,<4.0"
 langchain-core = "^0.2.36"
 pyyaml = "^6.0.1"
 types-pyyaml = "^6.0.12.20240311"
+pydantic = ">=2,<3"
 
 [tool.poetry.group.test]
 optional = true
diff --git a/libs/partners/prompty/scripts/check_pydantic.sh b/libs/partners/prompty/scripts/check_pydantic.sh
deleted file mode 100644
index 06b5bb81ae236..0000000000000
--- a/libs/partners/prompty/scripts/check_pydantic.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-#
-# This script searches for lines starting with "import pydantic" or "from pydantic"
-# in tracked files within a Git repository.
-#
-# Usage: ./scripts/check_pydantic.sh /path/to/repository
-
-# Check if a path argument is provided
-if [ $# -ne 1 ]; then
-  echo "Usage: $0 /path/to/repository"
-  exit 1
-fi
-
-repository_path="$1"
-
-# Search for lines matching the pattern within the specified repository
-result=$(git -C "$repository_path" grep -E '^import pydantic|^from pydantic')
-
-# Check if any matching lines were found
-if [ -n "$result" ]; then
-  echo "ERROR: The following lines need to be updated:"
-  echo "$result"
-  echo "Please replace the code with an import from langchain_core.pydantic_v1."
-  echo "For example, replace 'from pydantic import BaseModel'"
-  echo "with 'from langchain_core.pydantic_v1 import BaseModel'"
-  exit 1
-fi
diff --git a/libs/partners/prompty/tests/unit_tests/fake_callback_handler.py b/libs/partners/prompty/tests/unit_tests/fake_callback_handler.py
index fd68bebd2d9c5..cfbfa4f6f81b2 100644
--- a/libs/partners/prompty/tests/unit_tests/fake_callback_handler.py
+++ b/libs/partners/prompty/tests/unit_tests/fake_callback_handler.py
@@ -6,7 +6,7 @@
 
 from langchain_core.callbacks import AsyncCallbackHandler, BaseCallbackHandler
 from langchain_core.messages import BaseMessage
-from langchain_core.pydantic_v1 import BaseModel
+from pydantic import BaseModel
 
 
 class BaseFakeCallbackHandler(BaseModel):
@@ -259,7 +259,8 @@ def on_retriever_error(
     ) -> Any:
         self.on_retriever_error_common()
 
-    def __deepcopy__(self, memo: dict) -> "FakeCallbackHandler":
+    # Overriding since BaseModel has __deepcopy__ method as well
+    def __deepcopy__(self, memo: dict) -> "FakeCallbackHandler":  # type: ignore
         return self
 
 
@@ -391,5 +392,6 @@ async def on_text(
     ) -> None:
         self.on_text_common()
 
-    def __deepcopy__(self, memo: dict) -> "FakeAsyncCallbackHandler":
+    # Overriding since BaseModel has __deepcopy__ method as well
+    def __deepcopy__(self, memo: dict) -> "FakeAsyncCallbackHandler":  # type: ignore
         return self
diff --git a/libs/partners/prompty/tests/unit_tests/fake_chat_model.py b/libs/partners/prompty/tests/unit_tests/fake_chat_model.py
index a39f401e64349..0bfc795f7c656 100644
--- a/libs/partners/prompty/tests/unit_tests/fake_chat_model.py
+++ b/libs/partners/prompty/tests/unit_tests/fake_chat_model.py
@@ -21,7 +21,7 @@ def _call(
         run_manager: Optional[CallbackManagerForLLMRun] = None,
         **kwargs: Any,
     ) -> str:
-        return json.dumps([message.dict() for message in messages])
+        return json.dumps([message.model_dump() for message in messages])
 
     async def _agenerate(
         self,
diff --git a/libs/partners/prompty/tests/unit_tests/test_prompty_serialization.py b/libs/partners/prompty/tests/unit_tests/test_prompty_serialization.py
index 23bf299ee56b5..9233de366538e 100644
--- a/libs/partners/prompty/tests/unit_tests/test_prompty_serialization.py
+++ b/libs/partners/prompty/tests/unit_tests/test_prompty_serialization.py
@@ -6,8 +6,8 @@
 from langchain.tools import tool
 from langchain_core.language_models import FakeListLLM
 from langchain_core.messages import AIMessage, HumanMessage
-from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.utils.function_calling import convert_to_openai_function
+from pydantic import BaseModel, Field
 
 import langchain_prompty
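For context on the pattern these test changes adopt: with a direct `pydantic>=2,<3` dependency, models subclass `pydantic.BaseModel` and serialize with `model_dump()` instead of the v1-style `.dict()` used behind `langchain_core.pydantic_v1`. A minimal sketch of that usage, assuming only plain pydantic v2 (the `Reply` model below is a hypothetical example, not something defined in this PR):

# Hypothetical pydantic v2 model illustrating the import and serialization
# style the updated tests use; not part of the diff above.
from pydantic import BaseModel, Field


class Reply(BaseModel):
    content: str = Field(description="Message text")
    score: float = 0.0


reply = Reply(content="hello")
# pydantic v2 replaces the v1-era `.dict()` with `.model_dump()`,
# mirroring the `message.model_dump()` change in fake_chat_model.py.
print(reply.model_dump())  # {'content': 'hello', 'score': 0.0}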