diff --git a/docs/cassettes/output_parser_string_28eeace3-3896-497f-93ad-544cbfb7f15c.msgpack.zlib b/docs/cassettes/output_parser_string_28eeace3-3896-497f-93ad-544cbfb7f15c.msgpack.zlib new file mode 100644 index 0000000000000..1e217f2754185 --- /dev/null +++ b/docs/cassettes/output_parser_string_28eeace3-3896-497f-93ad-544cbfb7f15c.msgpack.zlib @@ -0,0 +1 @@ +eNrNVstu20YUbZbNqv0DYtDHRpRE01ZsFUXhyInbxI6d2ECcxIEwIkfiRNQMMzNU5BpcNO0P8BPaKFIgOA+jAbJouy266A+4i35L75DUK5ZtZFEn2lAzcx9nzn3MfdxvEyEpZxcOKFNEYEfBQsaP+4I8DIlUP/VaRHnc7W5ubG0/CQU9+txTKpDlQgEHNI+Z8gQPqJN3eKvQtgotIiVuENmtcXfvnwvuPmrhTlXxJmESlQ2rODefM9BQCnbu7SPBfQL/UCiJQHDqcIDClN667WH1pTSUR4xHBMNHGJQZW5gZVwVmDpUOzxmV5W9QdF+b5S7xtZrj49Alpm0umB6mzdD0sYLLaONSCYJbIKRESGCtOPczHAy3EhwNoqqZN63hEukIGmhi9OkqUVN46oK3DGz43MFaJK9VKAtCVZWOR1oYdPZRACQRoWhy5X00FE4Wai9I3AIyyhooisCAZp8K4mpgY2l9x6E0rz0gjgLp+1HfI9iFMP770Sddj0sVHx4LzUvsOCRQJmEOd8FL/LzxPQ1yhkvqmpoBUM5IEvt40CQkMLFP26SXasWvcBD4NAVReCA5O8hCZGo0x48HOpImBJip+M3yEEdhcw8yiRnFvH0pb73qmFJhynxIBQgPQOoFyflvkwcBdppgx8yyNO6lyi8mZbiMn65jZ2NryiQWjhc/xaJVmv91cl+ETNEWifuVzePussOxOztvWflLh1OG5R5z4qd17EtyOCJ5pDKYK87ZZrFkFq03U6aJEnumw8FD/HPxxZBAn7CG8uIn9kLpmSAygOIjP/ZATYXycReCRf7+q5+Vyy8b18eh/rS7AoGL/7hN3Jxh2cYN3jbA9bxhlcrzdtkqGavr2weVzM22jtORoUhHFUhb76R18JXheFhIor4OVd1cPNyGqpJ1CN6VYaL0HS9kTeIOKjNT5HkFQ5ab2g+UcfyMcdPRO0doTI0AoD5tUWVmTQUCrpdxd75YLB59caqkgBKiTCPp2ktLS2fYBQqJil9rIkzLMi17e0jH3SNjlmbamTI8PY0HEH12iuQYTw/waGnjVOkT8Qwy0CZ149/hf7VoLSwuXN9hrte5Vlsv8dbK3eZa55L1pE1xPLDyltHgvOGTl5WrZsr6VpImcX/lzo3l9e8qBzvmLV7jQMM2BroYZ6S3RQRkZjxwfB66UOuC9ED91vKd+PUimSvai/PWXN2u2bXSknl5Yytp2z/00lb0z8d/JrlSNrIErEJeCnXRxQqPGxeaOkS54RqV9xF19blswN3stfblRbuy9+2NoL56J2ytidVH7RUQn7YCG+lzgLCUFEyyxGTa2I/3dU0svClzIDN6Ne7dz0Ev5UEVElzqHstC38+2pOacOWS4GY6QJh17+E7Z9kIO8VBN7llRZOhfdPFixkrmsVqDBt2czc0MEYBKmUs6qFzMTZ9rHJmeLlTNjf6UEYom3QYQmrf8GChIno4ToAF5Cp8OLRGZgpbuTEMaiWXA1ohCKSvZ79wRQGpCCyNOU7/Jk1jOH4kTCqETcIqQ9wFkNJlwAWDO3//b4xk8FACFUTyk5twh5dFZtcuDs0qXBxNuz4zuu3cD65RuADNqFWYqLexm61D31Ic327Ur69cJaW/aOzevhUs7lU4HpNJJdnqQTVscWI2i/yU7reOhSJuqnghHwgHcmWI/2cw62/vGsL87nrCjD4WYXWjquyipJJjx3y+WiVL+cAha3kXRqdnzrjVtndS2h/PNzItNHaKJi0xNIJMVPGMOiSYGkbemjtJSdAKcWfebPEPji/wHR26g7g== \ No newline at end of file diff --git a/docs/cassettes/output_parser_string_8ac74999-0740-4178-8efd-32a855592f71.msgpack.zlib b/docs/cassettes/output_parser_string_8ac74999-0740-4178-8efd-32a855592f71.msgpack.zlib new file mode 100644 index 0000000000000..c2023e5c1d562 --- /dev/null +++ b/docs/cassettes/output_parser_string_8ac74999-0740-4178-8efd-32a855592f71.msgpack.zlib @@ -0,0 +1 @@ 
+eNqFVF1MHFUUXlJJ1RdLNLYxJk4X0VS5uzM7W2BRk9JdFPnpImxTadPiZeayM93Ze4eZO5uupFax9UVFrz7U1FgrLLt1RSiRpBK01bSNxoga00SwsY1GH0x9MJo+kCbiHdjlJyDO08z5/c73nTN9uRSybJ3gkmEdU2RBhfIPm/XlLNTjIJsezSYR1YiaaY22xwYdS5+p0Cg17Vq/H5q6D2KqWcTUFZ9Ckv6U5E8i24ZxZGe6iJqeMXq9SXiok5IEwra3VpDEQLBS8BaDuGVfr9ciBuJvXsdGlpd7FcKRYOqaGpBhEO/h/W4OUZHh2hQDOioCMtgONKgnHGBAyoF6D+c0BFU+zVXPpoxGbMrGViEchYqCTAoQVoiq4zj7MP6sblYKKup2q+R5a4zmKWD5BEImgIaeQtmFLHYGmqahK9D1+w/aBA8XoAKaNtFqd96dCPBBMWVn64o4/K1pTigWRJ9c7ZPOHAI2hTo2OCV8Eg4pa877J5c7TKgkeB1QEItlF5JHlscQmw21QCXavqIktBSNDUErWRX8aLndcjDVk4jlwq2r2xWcS+1knyT5qsdWFLbTWGFD3dCw0dgiyYsp+YAYkIFYBUTp7IrSiFppoBDegb0njhQJNBCOU40NSmLNaQvZJt9B9GKWp1HH7stwsdDXX+YKazMQbVqSuiwT4cKxT/cgtVKQZGEXSQm8dVCQqmqDcq0UEJ5oiQ2HC21ia+o0FrMgtru5VvXFvcgpmoMTSM2H19yIGe/SxBbvb+hJnYLCyXAd3U+WCYqiOPPAupEWSnJm3I4ZORQK/U9dzgyibNydD0gSkORYYUpp74ywVubC4RXwZF08HNH960Qu4SlGC+tG/weewN58ATTQVfYJf+8UpfagaWuhWFOgOdzaE8YojSO74cHBlA5ZXvJJQpyQuIFGw4+DMFQ0BNrn1We5SMeuupYnw8NPgzbSRTgNMcjpwgSjbDuy+MKxvGIQR+UnbKEsT2+r62DjNSggyjWBKlFVQ3JXVQjs5JdR3INFnTPu/c//q17g22Zx08Xf7nv5Vs/8s6G5f6rxonjXsc6Kk87tyezgG1N/0AM7yuobDd87YhLnJp5q+n2yaWPpno3n/r7QsenRypcub0c3/vzr8uyVWfbu9R+i5+c+lp1HXvnu9XQG1G+b6D8l9ZwvOd7W8MvADuWZ6GOVP15F0Usj05sf3PzNhDj0/gffb9HG7z028Nk5cOTtndf23Xzo7i9O/Rq586fT5Ue2VbMToYe/3ZpqnJ6cff7G/vLbXpuK+HqOltZfazjpfe7VyPHmsYrpt24eKO+MXind2t9xz8/ypVt6Rz/v+OfNr4av38HnmZvb4FFi2oWyEo/nX+5BZNA= \ No newline at end of file diff --git a/docs/cassettes/output_parser_string_8c87553e-4f85-46c4-8f1e-666f6a261a50.msgpack.zlib b/docs/cassettes/output_parser_string_8c87553e-4f85-46c4-8f1e-666f6a261a50.msgpack.zlib new file mode 100644 index 0000000000000..53a3a03a4a273 --- /dev/null +++ b/docs/cassettes/output_parser_string_8c87553e-4f85-46c4-8f1e-666f6a261a50.msgpack.zlib @@ -0,0 +1 @@ +eNqFVF1sFFUUbosRHtQUUERCcboBUejsznS2he6D2G6haumP7dZSTK13Z253pjt77zBzd2FZ+9AqmlhNHIOiMajIsluW2lKokIiYINji7wMPmDahogKmivyYEEXFeme725+01n3Zufd855zvnO+c2x4PQd1QMMrsUhCBOhAJPRhme1yHW4LQIM/HApDIWIpWV9V69gZ1ZXCFTIhmuBwOoCl2gIisY00R7SIOOEK8IwANA/igEfViKTyU6YnYAmBbE8F+iAybi+G5fGceY0uj6M1TEZuOVUi/bEED6jZqFTGlgoh1VS8DstJgiAyZrRDQP51REFMLELNBB0hUDBHnMe7idbbWRisslqBquYkqCEqQFdgCVgaKP8iqgNBirOAEYzWVF4FAMq8PkqZUdAshQUPUFc1qhGUtg2RK/mYdBxjAqFgEFsRuuShIC5ImQ5RhAFCfiE2jTYE6UZIlRmxpcPJAwloyrUF0Bflsra00gNVtRYeSRWwCbdWURmNvCxQJRTe2xmUIJCrbcEZ2VMYGMXunSdEDRBFqhIVIxBLNYn7g265oeYwEm61WJGiLEUxqbSb8EGosUJUQjI15mQeBpqnKGAlHi4FRV0oS1mIz3ZywlGOpoIiYR4vTPBzVYTo5iOHswho7f3AbaxCgIJVKT+WglGJa0n5sskEDop/GYVNTacbGnLsnY7Bh7qsAYlXtlJBAF2VzH9ADhc7Dk+/1ICJKAJpxd/X0dCnjRDrBzvP2Nb1TAhthJJr7moFqwN7xJo+7JPK5fIHlClmOPzolNCR6mBUxzWDu4brTDVQh8hHZ3Cs4uU4dGhpdNvhcjLqRoNEepWLBr07HU+vxflX5hNTzo6VUOPN4PZTyGF5gKnGIoamdDF/ocgouelNW4elyp9J4ZtSp10OXxmimWq1Pz0VclIPID6WEe8aJGLRNVKzT/KoSUAibehuojtbRjDo5jht8YFakTjdDQVbGqFBUVPQ/cWlnIDH7rPpYnmd5wZOqMn/zIDOT59gDk+ITs/hQRstnQU7wSaOZWdH/wUfYnEiRZhXJ/Jh+N3F8qZOv31pe1rJJqwv6/BvDdZXrt/iMvSEFmAnezjM+jH0q7HFvYN2APhlsbVJ9M17aUFlc8Zi7axNbg72YtsEDaLsQRjBWC3U6cGZCVHFQoiuswxh1ryluMPvWwnxOWJtfVOAtbBa8hUVsCd2M9ByM6xy19j/5KLfFxh6ezzKL7u+Yl5H8zZGqz5Wf5LJH6z89/sdrj69b6h2+FN14os11KremJNQHlTMFuwKNI6P2/S+1Vw2cu/XJlds7jzLvbYHzVx6oONx04cuL/+T2PP3tW8sjw0u3n3+ktSNzxzefL2hz+4KLH/2ppexMtiZfXlQwr3rzn+yGBmfHqoarz+7sH6lbEH9Iqz6bvSTr+sN/ywveyYqsyrri6Gu+5+c3bpwV6vYM8YlXc56o2ZFz4M6I+cLF3c5E6Av9yJuHu+2rvb89+Ff54ps9hz786Jr91G50x3HHDxdixxrXvb1w2XeXtt78foTPDT+5+vwz0sJtK2qu/3pvZ+zGbd6MwOvRoSXZ1xxZL/84d27ZEda1E2Xd6n130V2/L3ulJLvf//WJqp67T+5yF4de7G87fei+ub+MXL3syhmooj0aHZ2T0edPDOzPzMj4F2C/3M4= \ No newline at end of file diff --git 
a/docs/cassettes/output_parser_string_9cbb8848-9101-465e-b230-0f7af6fb4105.msgpack.zlib b/docs/cassettes/output_parser_string_9cbb8848-9101-465e-b230-0f7af6fb4105.msgpack.zlib new file mode 100644 index 0000000000000..d2caf2a91615a --- /dev/null +++ b/docs/cassettes/output_parser_string_9cbb8848-9101-465e-b230-0f7af6fb4105.msgpack.zlib @@ -0,0 +1 @@ +eNqFVGtsFFUU3spDSNDwsuoP43SpbQid3ZlOKWyXCHVpa2laSrvKy7Lenbm7M3T23unM3UK7VEMp/IAYO6gYkSDIdpesLXSVhBhAQ7CJCDEGiKYlxQSEVBvwUWIgUfHOdrelAXH/7Nx7vnPOd853zm2PN0PdUDDK6lYQgToQCT0YZntch01haJCOWAgSGUvR2pX13kNhXel/QSZEM0qcTqApDoCIrGNNER0iDjmbeWcIGgYIQiPqx1LLQJY3Yg+BzT6CGyEy7CUMzxUWFTD2DIrerI/YdaxC+mUPG1C3U6uIKRVErKvVMiD5BkNkyGyCgP7pjIKYeoCYch0gUTFEXMB4Spfa2xqssFiCquUmqiAsQVZgF7IyUBrDrAoILcYKTjBW03kRCKXyBiHxpaNbCAkaoq5oViMsawUkE/IHdBxiAKNiEVgQh+WiIC1MfIYowxCgPhG7RpsCdaKkSozYM+DUgbRoqbQG0RUUtLe10QBWtxUdShaxcbRVUwaN/RuhSCi6oS0uQyBR2a7YZkZlbBAz+YAUR4EoQo2wEIlYolnMnmCrohUwEgxYrUjQFiOY0tpMNEKosUBVmmFs1MvsBZqmKqMknBsNjLrTkrAWmwfNCUs5lgqKiHm8NMPDWdtCJwcxnENY5OB7N7MGAQpSqfRUDkoppqXsJ+43aEBspHHY9FSasVHnI/djsGF2VQNxZf2EkEAXZbML6KHios/uv9fDiCghaMY9tQ+mSxvH0wkOnncsSk4IbLQg0ewKANWAybEmj7kkCrlCgeWKWY4/PiE0JHoLK2KawTzIHck0UIUoSGTzkFDEHdahodFlg9ti1I2EjfYoFQue/zqeXo+PV1aNSz0rupwKZ55aDaUChheYGtzM0NRFDF9cUiSU8AuZimpvtyedxvtQnZJeujRGgGpVlpmLuCiHUSOUEp6HTkS/fbxineZXlZBC2PTbQHW0jma0iOO4/rxHInW6GQqyMkYFl8v1P3FpZyAxj1n1sTzP8oI3XaWwrp95mOfoA5PmE7P4UEa5j0CO88mgmUei/4PPwnWJNGlWkcyT9NvH8aVevM6vlVVt3FwceKW+pnCVJq0Q6g41K8BM8A6eCWIcVOFRTznrAfTJYOtT6pvx5WtrSqsrPd1r2Drsx7QNXkDbhTCCsXqo04EzE6KKwxJdYR3GqHtd6Vrz2GJYyAmLBaEY+BcL/mIX+xLdjMwcjOkctfY/9ShvjY0+PH1ZG57fNc2W+k16bdWqqjPLZvyzoGLDls/fCg/W/loN24c6klsPv3x2oPvUzdbjg9f3Gbe/rPrk6m/zc+9+scW3e97QY3MvHTw/9azq+iD/h5+vX8xhv41/dKLvalPrvemz7/R2ZZflvF7JnHZfzOYmP/cMu61j4ClpWSnisi9VRa5t2N46J3LmtPbujB2ezk73OenZS8O/b8q7WHHuVnv7CUFcXhZY8KRj5+41Wpn97SvvwGT+TztPrnE5b8h9PRUfDne656Pvht4beVX0LZm5a29H/p97r1Rq2wuONbh88xqD7jfcPw5/6t8aWXL7oPnmnn3C2hk39w82DSwJbJPqdlyfFh/uG7nw/fD7W27kud1Tp2cP71k9O7g0e31isHxqePK1P2qy5/61qG1K9UheTu/lwH6OHJhz98gs+emzNyZ/Uy7mfnXr8V/u9HQmL3c+ceACKThd9mJPQ271yBSb7d69SbYVd/6Wr2bZbP8COXvwZg== \ No newline at end of file diff --git a/docs/docs/concepts/output_parsers.mdx b/docs/docs/concepts/output_parsers.mdx index d15bc6fdb6894..f2cf62c04713f 100644 --- a/docs/docs/concepts/output_parsers.mdx +++ b/docs/docs/concepts/output_parsers.mdx @@ -26,6 +26,7 @@ LangChain has lots of different types of output parsers. This is a list of outpu | Name | Supports Streaming | Has Format Instructions | Calls LLM | Input Type | Output Type | Description | |-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------|-------------------------|-----------|--------------------|----------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [Str](https://python.langchain.com/api_reference/core/output_parsers/langchain_core.output_parsers.string.StrOutputParser.html) | ✅ | | | `str` \| `Message` | String | Parses texts from message objects. Useful for handling variable formats of message content (e.g., extracting text from content blocks). 
| | [JSON](https://python.langchain.com/api_reference/core/output_parsers/langchain_core.output_parsers.json.JSONOutputParser.html#langchain_core.output_parsers.json.JSONOutputParser) | ✅ | ✅ | | `str` \| `Message` | JSON object | Returns a JSON object as specified. You can specify a Pydantic model and it will return JSON for that model. Probably the most reliable output parser for getting structured data that does NOT use function calling. | | [XML](https://python.langchain.com/api_reference/core/output_parsers/langchain_core.output_parsers.xml.XMLOutputParser.html#langchain_core.output_parsers.xml.XMLOutputParser) | ✅ | ✅ | | `str` \| `Message` | `dict` | Returns a dictionary of tags. Use when XML output is needed. Use with models that are good at writing XML (like Anthropic's). | | [CSV](https://python.langchain.com/api_reference/core/output_parsers/langchain_core.output_parsers.list.CommaSeparatedListOutputParser.html#langchain_core.output_parsers.list.CommaSeparatedListOutputParser) | ✅ | ✅ | | `str` \| `Message` | `List[str]` | Returns a list of comma separated values. | diff --git a/docs/docs/concepts/runnables.mdx b/docs/docs/concepts/runnables.mdx index e37022aa52490..dea928568a735 100644 --- a/docs/docs/concepts/runnables.mdx +++ b/docs/docs/concepts/runnables.mdx @@ -315,7 +315,7 @@ the `RunnableConfig` manually to sub-calls in some cases. Please see the [Propagating RunnableConfig](#propagation-of-runnableconfig) section for more information. ::: -## Creating a runnable from a function +## Creating a runnable from a function {#custom-runnables} You may need to create a custom Runnable that runs arbitrary logic. This is especially useful if using [LangChain Expression Language (LCEL)](/docs/concepts/lcel) to compose diff --git a/docs/docs/concepts/tool_calling.mdx b/docs/docs/concepts/tool_calling.mdx index c3c753ee52570..438c52ccb25a6 100644 --- a/docs/docs/concepts/tool_calling.mdx +++ b/docs/docs/concepts/tool_calling.mdx @@ -128,7 +128,7 @@ For more details on usage, see our [how-to guides](/docs/how_to/#tools)! [Tools](/docs/concepts/tools/) implement the [Runnable](/docs/concepts/runnables/) interface, which means that they can be invoked (e.g., `tool.invoke(args)`) directly. -[LangGraph](https://langchain-ai.github.io/langgraph/) offers pre-built components (e.g., [`ToolNode`](https://langchain-ai.github.io/langgraph/reference/prebuilt/#toolnode)) that will often invoke the tool in behalf of the user. +[LangGraph](https://langchain-ai.github.io/langgraph/) offers pre-built components (e.g., [`ToolNode`](https://langchain-ai.github.io/langgraph/reference/prebuilt/#langgraph.prebuilt.tool_node.ToolNode)) that will often invoke the tool on behalf of the user. :::info[Further reading] diff --git a/docs/docs/contributing/how_to/documentation/style_guide.mdx b/docs/docs/contributing/how_to/documentation/style_guide.mdx index 437977573c09f..2eb20d6853786 100644 --- a/docs/docs/contributing/how_to/documentation/style_guide.mdx +++ b/docs/docs/contributing/how_to/documentation/style_guide.mdx @@ -4,8 +4,8 @@ sidebar_class_name: "hidden" # Documentation Style Guide -As LangChain continues to grow, the surface area of documentation required to cover it continues to grow too. -This page provides guidelines for anyone writing documentation for LangChain, as well as some of our philosophies around +As LangChain continues to grow, the amount of documentation required to cover the various concepts and integrations continues to grow too.
+This page provides guidelines for anyone writing documentation for LangChain and outlines some of our philosophies around organization and structure. ## Philosophy @@ -18,9 +18,9 @@ Under this framework, all documentation falls under one of four categories: [Tut ### Tutorials Tutorials are lessons that take the reader through a practical activity. Their purpose is to help the user -gain understanding of concepts and how they interact by showing one way to achieve some goal in a hands-on way. They should **avoid** giving -multiple permutations of ways to achieve that goal in-depth. Instead, it should guide a new user through a recommended path to accomplishing the tutorial's goal. While the end result of a tutorial does not necessarily need to -be completely production-ready, it should be useful and practically satisfy the the goal that you clearly stated in the tutorial's introduction. Information on how to address additional scenarios +gain an understanding of concepts and how they interact by showing one way to achieve a specific goal in a hands-on manner. They should **avoid** giving +multiple permutations of ways to achieve that goal in-depth. Instead, it should guide a new user through a recommended path to accomplish the tutorial's goal. While the end result of a tutorial does not necessarily need to +be completely production-ready, it should be useful and practically satisfy the goal that is clearly stated in the tutorial's introduction. Information on how to address additional scenarios belongs in how-to guides. To quote the Diataxis website: @@ -53,8 +53,8 @@ Here are some high-level tips on writing a good tutorial: ### How-to guides A how-to guide, as the name implies, demonstrates how to do something discrete and specific. -It should assume that the user is already familiar with underlying concepts, and is trying to solve an immediate problem, but -should still give some background or list the scenarios where the information contained within can be relevant. +It should assume that the user is already familiar with underlying concepts, and is focused on solving an immediate problem. However, +it should still provide some background or list certain scenarios where the information may be relevant. They can and should discuss alternatives if one approach may be better than another in certain cases. To quote the Diataxis website: @@ -79,10 +79,10 @@ Here are some high-level tips on writing a good how-to guide: ### Conceptual guide -LangChain's conceptual guide falls under the **Explanation** quadrant of Diataxis. They should cover LangChain terms and concepts -in a more abstract way than how-to guides or tutorials, and should be geared towards curious users interested in -gaining a deeper understanding of the framework. Try to avoid excessively large code examples - the goal here is to -impart perspective to the user rather than to finish a practical project. These guides should cover **why** things work they way they do. +LangChain's conceptual guide falls under the **Explanation** quadrant of Diataxis. These guides should cover LangChain terms and concepts +in a more abstract way than how-to guides or tutorials, targeting curious users interested in +gaining a deeper understanding of and insights into the framework. Try to avoid excessively large code examples as the primary goal is to +provide perspective to the user rather than to finish a practical project. These guides should cover **why** things work the way they do.
This guide on documentation style is meant to fall under this category. @@ -137,14 +137,14 @@ be only one (very rarely two), canonical pages for a given concept or feature. I ### Link to other sections -Because sections of the docs do not exist in a vacuum, it is important to link to other sections as often as possible -to allow a developer to learn more about an unfamiliar topic inline. +Because sections of the docs do not exist in a vacuum, it is important to link to other sections frequently, +to allow a developer to learn more about an unfamiliar topic within the flow of reading. -This includes linking to the API references as well as conceptual sections! +This includes linking to the API references and conceptual sections! ### Be concise -In general, take a less-is-more approach. If a section with a good explanation of a concept already exists, you should link to it rather than +In general, take a less-is-more approach. If another section with a good explanation of a concept exists, you should link to it rather than re-explain it, unless the concept you are documenting presents some new wrinkle. Be concise, including in code samples. diff --git a/docs/docs/how_to/index.mdx b/docs/docs/how_to/index.mdx index 8f26f725158d9..76f74934e572b 100644 --- a/docs/docs/how_to/index.mdx +++ b/docs/docs/how_to/index.mdx @@ -115,6 +115,7 @@ What LangChain calls [LLMs](/docs/concepts/text_llms) are older forms of languag [Output Parsers](/docs/concepts/output_parsers) are responsible for taking the output of an LLM and parsing into more structured format. +- [How to: parse text from message objects](/docs/how_to/output_parser_string) - [How to: use output parsers to parse an LLM response into structured format](/docs/how_to/output_parser_structured) - [How to: parse JSON output](/docs/how_to/output_parser_json) - [How to: parse XML output](/docs/how_to/output_parser_xml) diff --git a/docs/docs/how_to/output_parser_string.ipynb b/docs/docs/how_to/output_parser_string.ipynb new file mode 100644 index 0000000000000..17a2474e1e942 --- /dev/null +++ b/docs/docs/how_to/output_parser_string.ipynb @@ -0,0 +1,202 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "1d6024e0-3847-4418-b8a8-6b8f83adf4c2", + "metadata": {}, + "source": [ + "# How to parse text from message objects\n", + "\n", + ":::info Prerequisites\n", + "\n", + "This guide assumes familiarity with the following concepts:\n", + "- [Chat models](/docs/concepts/chat_models/)\n", + "- [Messages](/docs/concepts/messages/)\n", + "- [Output parsers](/docs/concepts/output_parsers/)\n", + "- [LangChain Expression Language (LCEL)](/docs/concepts/lcel/)\n", + "\n", + ":::\n", + "\n", + "LangChain [message](/docs/concepts/messages/) objects support content in a [variety of formats](/docs/concepts/messages/#content), including text, [multimodal data](/docs/concepts/multimodality/), and a list of [content block](/docs/concepts/messages/#aimessage) dicts.\n", + "\n", + "The format of [Chat model](/docs/concepts/chat_models/) response content may depend on the provider. For example, the chat model for [Anthropic](/docs/integrations/chat/anthropic/) will return string content for typical string input:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "8ac74999-0740-4178-8efd-32a855592f71", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Hi there! How are you doing today? 
Is there anything I can help you with?'" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from langchain_anthropic import ChatAnthropic\n", + "\n", + "llm = ChatAnthropic(model=\"claude-3-5-haiku-latest\")\n", + "\n", + "response = llm.invoke(\"Hello\")\n", + "response.content" + ] + }, + { + "cell_type": "markdown", + "id": "69b7c3ae-0022-4737-9db7-f44db3402de2", + "metadata": {}, + "source": [ + "But when tool calls are generated, the response content is structured into content blocks that convey the model's reasoning process:" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "8c87553e-4f85-46c4-8f1e-666f6a261a50", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[{'text': \"I'll help you get the current weather for San Francisco, California. Let me check that for you right away.\",\n", + " 'type': 'text'},\n", + " {'id': 'toolu_015PwwcKxWYctKfY3pruHFyy',\n", + " 'input': {'location': 'San Francisco, CA'},\n", + " 'name': 'get_weather',\n", + " 'type': 'tool_use'}]" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from langchain_core.tools import tool\n", + "\n", + "\n", + "@tool\n", + "def get_weather(location: str) -> str:\n", + " \"\"\"Get the weather from a location.\"\"\"\n", + "\n", + " return \"Sunny.\"\n", + "\n", + "\n", + "llm_with_tools = llm.bind_tools([get_weather])\n", + "\n", + "response = llm_with_tools.invoke(\"What's the weather in San Francisco, CA?\")\n", + "response.content" + ] + }, + { + "cell_type": "markdown", + "id": "039f6d62-098f-42c9-8b07-43cb1f2a831b", + "metadata": {}, + "source": [ + "To automatically parse text from message objects irrespective of the format of the underlying content, we can use [StrOutputParser](https://python.langchain.com/api_reference/core/output_parsers/langchain_core.output_parsers.string.StrOutputParser.html). We can compose it with a chat model as follows:" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "0bb9b4dd-64a9-463d-9c71-df147630f3c3", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_core.output_parsers import StrOutputParser\n", + "\n", + "chain = llm_with_tools | StrOutputParser()" + ] + }, + { + "cell_type": "markdown", + "id": "4929c724-471f-4f77-a231-36e9af9418a3", + "metadata": {}, + "source": [ + "[StrOutputParser](https://python.langchain.com/api_reference/core/output_parsers/langchain_core.output_parsers.string.StrOutputParser.html) simplifies the extraction of text from message objects:" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "9cbb8848-9101-465e-b230-0f7af6fb4105", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "I'll help you check the weather in San Francisco, CA right away.\n" + ] + } + ], + "source": [ + "response = chain.invoke(\"What's the weather in San Francisco, CA?\")\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "id": "13642ad5-325d-4d9b-b97e-cac40345bfbc", + "metadata": {}, + "source": [ + "This is particularly useful in streaming contexts:" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "28eeace3-3896-497f-93ad-544cbfb7f15c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "|I'll| help| you get| the current| weather for| San Francisco, California|. 
Let| me retrieve| that| information for you.||||||||||" ] } ], "source": [ "for chunk in chain.stream(\"What's the weather in San Francisco, CA?\"):\n", " print(chunk, end=\"|\")" ] }, { "cell_type": "markdown", "id": "858e2071-a483-404e-9eca-c73a4466fd83", "metadata": {}, "source": [ "See the [API Reference](https://python.langchain.com/api_reference/core/output_parsers/langchain_core.output_parsers.string.StrOutputParser.html) for more information." ] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.4" } }, "nbformat": 4, "nbformat_minor": 5 +} diff --git a/docs/docs/integrations/chat/cloudflare_workersai.ipynb b/docs/docs/integrations/chat/cloudflare_workersai.ipynb index df7c2a1cb667b..571cf32282fb8 100644 --- a/docs/docs/integrations/chat/cloudflare_workersai.ipynb +++ b/docs/docs/integrations/chat/cloudflare_workersai.ipynb @@ -17,7 +17,7 @@ "source": [ "# ChatCloudflareWorkersAI\n", "\n", - "This will help you getting started with CloudflareWorkersAI [chat models](/docs/concepts/#chat-models). For detailed documentation of all available Cloudflare WorkersAI models head to the [API reference](https://developers.cloudflare.com/workers-ai/).\n", + "This will help you get started with CloudflareWorkersAI [chat models](/docs/concepts/chat_models). For detailed documentation of all available Cloudflare WorkersAI models, head to the [API reference](https://developers.cloudflare.com/workers-ai/).\n", "\n", "\n", "## Overview\n", diff --git a/docs/docs/integrations/document_loaders/google_cloud_sql_mssql.ipynb b/docs/docs/integrations/document_loaders/google_cloud_sql_mssql.ipynb index 1dd568c85c7ea..42ac2892cb6d4 100644 --- a/docs/docs/integrations/document_loaders/google_cloud_sql_mssql.ipynb +++ b/docs/docs/integrations/document_loaders/google_cloud_sql_mssql.ipynb @@ -34,7 +34,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -328,7 +328,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The view generated from SQL query can have different schema than default table. In such cases, the behavior of MSSQLLoader is the same as loading from table with non-default schema. Please refer to section [Load documents with customized document page content & metadata](#Load-documents-with-customized-document-page-content-&-metadata)." + "The view generated from a SQL query can have a different schema than the default table. In such cases, the behavior of MSSQLLoader is the same as loading from a table with a non-default schema. Please refer to the section [Load documents with customized document page content & metadata](#load-documents-with-customized-document-page-content--metadata)."
] }, { @@ -633,7 +633,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.6" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/docs/integrations/document_loaders/google_cloud_sql_mysql.ipynb b/docs/docs/integrations/document_loaders/google_cloud_sql_mysql.ipynb index d656b8642f47e..5743fdedc543a 100644 --- a/docs/docs/integrations/document_loaders/google_cloud_sql_mysql.ipynb +++ b/docs/docs/integrations/document_loaders/google_cloud_sql_mysql.ipynb @@ -317,7 +317,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The view generated from SQL query can have different schema than default table. In such cases, the behavior of MySQLLoader is the same as loading from table with non-default schema. Please refer to section [Load documents with customized document page content & metadata](#Load-documents-with-customized-document-page-content-&-metadata)." + "The view generated from a SQL query can have a different schema than the default table. In such cases, the behavior of MySQLLoader is the same as loading from a table with a non-default schema. Please refer to the section [Load documents with customized document page content & metadata](#load-documents-with-customized-document-page-content--metadata)." ] }, { @@ -619,7 +619,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.6" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/docs/integrations/providers/astradb.mdx b/docs/docs/integrations/providers/astradb.mdx index d545d1ea02625..853eafcc8ff5d 100644 --- a/docs/docs/integrations/providers/astradb.mdx +++ b/docs/docs/integrations/providers/astradb.mdx @@ -133,7 +133,7 @@ store = AstraDBStore( ) ``` -Learn more in the [example notebook](/docs/integrations/stores/astradb#astradbstore). +See the API reference for the [AstraDBStore](https://python.langchain.com/api_reference/astradb/storage/langchain_astradb.storage.AstraDBStore.html). ## Byte Store @@ -147,4 +147,4 @@ store = AstraDBByteStore( ) ``` -Learn more in the [example notebook](/docs/integrations/stores/astradb#astradbbytestore). +See the API reference for the [AstraDBByteStore](https://python.langchain.com/api_reference/astradb/storage/langchain_astradb.storage.AstraDBByteStore.html). diff --git a/docs/docs/integrations/providers/nvidia.mdx b/docs/docs/integrations/providers/nvidia.mdx index 0f02b3522367e..2dc6bf2f43837 100644 --- a/docs/docs/integrations/providers/nvidia.mdx +++ b/docs/docs/integrations/providers/nvidia.mdx @@ -51,7 +51,7 @@ result = llm.invoke("Write a ballad about LangChain.") print(result.content) ``` -Using the API, you can query live endpoints available on the NVIDIA API Catalog to get quick results from a DGX-hosted cloud compute environment. All models are source-accessible and can be deployed on your own compute cluster using NVIDIA NIM which is part of NVIDIA AI Enterprise, shown in the next section [Working with NVIDIA NIMs](##working-with-nvidia-nims). +Using the API, you can query live endpoints available on the NVIDIA API Catalog to get quick results from a DGX-hosted cloud compute environment. All models are source-accessible and can be deployed on your own compute cluster using NVIDIA NIM, which is part of NVIDIA AI Enterprise, as shown in the next section, [Working with NVIDIA NIMs](#working-with-nvidia-nims).
## Working with NVIDIA NIMs When ready to deploy, you can self-host models with NVIDIA NIM—which is included with the NVIDIA AI Enterprise software license—and run them anywhere, giving you ownership of your customizations and full control of your intellectual property (IP) and AI applications. diff --git a/docs/docs/integrations/providers/unstructured.mdx b/docs/docs/integrations/providers/unstructured.mdx index 33510cf5e4803..312a28d6f6815 100644 --- a/docs/docs/integrations/providers/unstructured.mdx +++ b/docs/docs/integrations/providers/unstructured.mdx @@ -164,7 +164,7 @@ from langchain_community.document_loaders import UnstructuredOrgModeLoader ### UnstructuredPDFLoader -See a [usage example](/docs/how_to/document_loader_pdf#using-unstructured). +See a [usage example](/docs/how_to/document_loader_pdf/#layout-analysis-and-extraction-of-text-from-images). ```python from langchain_community.document_loaders import UnstructuredPDFLoader diff --git a/docs/docs/integrations/tools/google_books.ipynb b/docs/docs/integrations/tools/google_books.ipynb index 57446c435f638..0954a1f15067c 100644 --- a/docs/docs/integrations/tools/google_books.ipynb +++ b/docs/docs/integrations/tools/google_books.ipynb @@ -139,7 +139,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### [Invoke directly with args](/docs/concepts/#invoke-with-just-the-arguments)\n", + "### [Invoke directly with args](/docs/concepts/tools)\n", "\n", "See below for an direct invocation example." ] @@ -165,7 +165,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### [Invoke with ToolCall](/docs/concepts/#invoke-with-toolcall)\n", + "### [Invoke with ToolCall](/docs/concepts/tools)\n", "\n", "See below for a tool call example." ] diff --git a/docs/docs/integrations/tools/zapier.ipynb b/docs/docs/integrations/tools/zapier.ipynb index a6deab263082e..3c73d1f15ac11 100644 --- a/docs/docs/integrations/tools/zapier.ipynb +++ b/docs/docs/integrations/tools/zapier.ipynb @@ -110,19 +110,19 @@ "text": [ "\n", "\n", - "\u001b[1m> Entering new AgentExecutor chain...\u001b[0m\n", - "\u001b[32;1m\u001b[1;3m I need to find the email and summarize it.\n", + "\u001B[1m> Entering new AgentExecutor chain...\u001B[0m\n", + "\u001B[32;1m\u001B[1;3m I need to find the email and summarize it.\n", "Action: Gmail: Find Email\n", - "Action Input: Find the latest email from Silicon Valley Bank\u001b[0m\n", - "Observation: \u001b[31;1m\u001b[1;3m{\"from__name\": \"Silicon Valley Bridge Bank, N.A.\", \"from__email\": \"sreply@svb.com\", \"body_plain\": \"Dear Clients, After chaotic, tumultuous & stressful days, we have clarity on path for SVB, FDIC is fully insuring all deposits & have an ask for clients & partners as we rebuild. Tim Mayopoulos Finished chain.\u001b[0m\n" + "\u001B[1m> Finished chain.\u001B[0m\n" ] }, { @@ -286,18 +286,18 @@ "text": [ "\n", "\n", - "\u001b[1m> Entering new SimpleSequentialChain chain...\u001b[0m\n", - "\u001b[36;1m\u001b[1;3m{\"from__name\": \"Silicon Valley Bridge Bank, N.A.\", \"from__email\": \"sreply@svb.com\", \"body_plain\": \"Dear Clients, After chaotic, tumultuous & stressful days, we have clarity on path for SVB, FDIC is fully insuring all deposits & have an ask for clients & partners as we rebuild. 
Tim Mayopoulos Entering new SimpleSequentialChain chain...\u001B[0m\n", + "\u001B[36;1m\u001B[1;3m{\"from__name\": \"Silicon Valley Bridge Bank, N.A.\", \"from__email\": \"sreply@svb.com\", \"body_plain\": \"Dear Clients, After chaotic, tumultuous & stressful days, we have clarity on path for SVB, FDIC is fully insuring all deposits & have an ask for clients & partners as we rebuild. Tim Mayopoulos Finished chain.\u001b[0m\n" + "\u001B[1m> Finished chain.\u001B[0m\n" ] }, { @@ -325,7 +325,7 @@ "id": "09ff954e-45f2-4595-92ea-91627abde4a0", "metadata": {}, "source": [ - "## Example Using OAuth Access Token\n", + "## Example Using OAuth Access Token{#oauth}\n", "The below snippet shows how to initialize the wrapper with a procured OAuth access token. Note the argument being passed in as opposed to setting an environment variable. Review the [authentication docs](https://nla.zapier.com/docs/authentication/#oauth-credentials) for full user-facing oauth developer support.\n", "\n", "The developer is tasked with handling the OAuth handshaking to procure and refresh the access token." diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 305b14df135ac..fc00559683ac9 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -26,6 +26,7 @@ const config = { trailingSlash: true, onBrokenLinks: "throw", onBrokenMarkdownLinks: "throw", + onBrokenAnchors: "throw", themes: ["@docusaurus/theme-mermaid"], markdown: {