From 8ad1c7ca36a6ed097ddd0757c8f0d379955fdb26 Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Tue, 20 Aug 2024 12:14:33 -0700
Subject: [PATCH] docs[minor]: Add missing items to sidebar, add new tools doc (#332)
---
 docs/mkdocs.yml | 14 +
 examples/how-tos/tool-calling.ipynb | 605 ++++++++++++++++++++++++++++
 2 files changed, 619 insertions(+)
 create mode 100644 examples/how-tos/tool-calling.ipynb

diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml
index 55db9af50..7c68905db 100644
--- a/docs/mkdocs.yml
+++ b/docs/mkdocs.yml
@@ -96,14 +96,24 @@ nav:
- Persistence:
- Add persistence ("memory") to your graph: "how-tos/persistence.ipynb"
- View and update past graph state: "how-tos/time-travel.ipynb"
+ - Manage conversation history: how-tos/manage-conversation-history.ipynb
- Create a custom checkpointer using Postgres: "how-tos/persistence-postgres.ipynb"
- Human-in-the-loop:
- Add human-in-the-loop: "how-tos/human-in-the-loop.ipynb"
+ - Add breakpoints: how-tos/breakpoints.ipynb
+ - Wait for user input: how-tos/wait-user-input.ipynb
+ - View and update past graph state: how-tos/time-travel.ipynb
+ - Edit graph state: how-tos/edit-graph-state.ipynb
- Streaming:
- Stream full state: "how-tos/stream-values.ipynb"
- Stream state updates: "how-tos/stream-updates.ipynb"
- Stream LLM tokens: "how-tos/stream-tokens.ipynb"
- Stream LLM tokens without LangChain models: "how-tos/streaming-tokens-without-langchain.ipynb"
+ - Tool calling:
+ - Call tools using ToolNode: how-tos/tool-calling.ipynb
+ - Handle tool calling errors: how-tos/tool-calling-errors.ipynb
+ # - Pass graph state to tools: how-tos/pass-run-time-values-to-tools.ipynb TODO
+ # - Pass config to tools: how-tos/pass-config-to-tools.ipynb TODO
- Other:
- Add runtime configuration: "how-tos/configuration.ipynb"
- Force an agent to call a tool: "how-tos/force-calling-a-tool-first.ipynb"
@@ -113,6 +123,10 @@ nav:
- Manage agent steps: "how-tos/managing-agent-steps.ipynb"
- "Conceptual Guides":
- "concepts/index.md"
+ - LangGraph for Agentic Applications: concepts/high_level.md
+ - Low Level LangGraph Concepts: concepts/low_level.md
+ - Common Agentic Patterns: concepts/agentic_concepts.md
+ - FAQ: concepts/faq.md
- "Reference":
- "reference/index.html"

diff --git a/examples/how-tos/tool-calling.ipynb b/examples/how-tos/tool-calling.ipynb
new file mode 100644
index 000000000..9cdb2ecfd
--- /dev/null
+++ b/examples/how-tos/tool-calling.ipynb
@@ -0,0 +1,605 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# How to call tools using ToolNode\n",
+ "\n",
+ "This guide covers how to use LangGraph's prebuilt [`ToolNode`](https://langchain-ai.github.io/langgraphjs/reference/classes/prebuilt.ToolNode.html) for tool calling.\n",
+ "\n",
+ "`ToolNode` is a LangChain Runnable that takes graph state (with a list of messages) as input and outputs a state update with the result of tool calls. It is designed to work well out of the box with LangGraph's prebuilt ReAct agent, but can also work with any `StateGraph` as long as its state has a `messages` key with an appropriate reducer (see [`MessagesState`](https://github.com/langchain-ai/langgraphjs/blob/bcefdd0cfa1727104012993326462b5ebca46f79/langgraph/src/graph/message.ts#L79)).\n",
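+ "\n",
+ "For reference, a compatible graph state can be declared with the `Annotation` API and the prebuilt `messagesStateReducer`. The following is a minimal sketch that mirrors the `MessagesState` definition used later in this guide (the `default` factory is an added assumption):\n",
+ "\n",
+ "```typescript\n",
+ "import { Annotation, messagesStateReducer } from \"@langchain/langgraph\";\n",
+ "import { BaseMessage } from \"@langchain/core/messages\";\n",
+ "\n",
+ "// Graph state with a `messages` key whose reducer appends new messages\n",
+ "const MessagesState = Annotation.Root({\n",
+ "  messages: Annotation<BaseMessage[]>({\n",
+ "    reducer: messagesStateReducer,\n",
+ "    default: () => [],\n",
+ "  }),\n",
+ "});\n",
+ "```"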
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Setup\n",
+ "\n",
+ "```bash\n",
+ "npm install @langchain/langgraph @langchain/anthropic zod\n",
+ "```\n",
+ "\n",
+ "Set the required environment variables:\n",
+ "\n",
+ "```typescript\n",
+ "process.env.ANTHROPIC_API_KEY = 'your-anthropic-api-key';\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Define tools"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import { tool } from '@langchain/core/tools';\n",
+ "import { z } from 'zod';\n",
+ "\n",
+ "const getWeather = tool((input) => {\n",
+ " if (['sf', 'san francisco'].includes(input.location.toLowerCase())) {\n",
+ " return 'It\'s 60 degrees and foggy.';\n",
+ " } else {\n",
+ " return 'It\'s 90 degrees and sunny.';\n",
+ " }\n",
+ "}, {\n",
+ " name: 'get_weather',\n",
+ " description: 'Call to get the current weather.',\n",
+ " schema: z.object({\n",
+ " location: z.string().describe(\"Location to get the weather for.\"),\n",
+ " })\n",
+ "})\n",
+ "\n",
+ "const getCoolestCities = tool(() => {\n",
+ " return 'nyc, sf';\n",
+ "}, {\n",
+ " name: 'get_coolest_cities',\n",
+ " description: 'Get a list of coolest cities',\n",
+ " schema: z.object({\n",
+ " noOp: z.string().optional().describe(\"No-op parameter.\"),\n",
+ " })\n",
+ "})"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import { ToolNode } from '@langchain/langgraph/prebuilt';\n",
+ "\n",
+ "const tools = [getWeather, getCoolestCities]\n",
+ "const toolNode = new ToolNode(tools)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Manually call `ToolNode`"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "`ToolNode` operates on graph state with a list of messages. It expects the last message in the list to be an `AIMessage` with the `tool_calls` field populated. 
\n", + "\n", + "Let's first see how to invoke the tool node manually:" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\n", + " messages: [\n", + " ToolMessage {\n", + " \"content\": \"It's 60 degrees and foggy.\",\n", + " \"name\": \"get_weather\",\n", + " \"additional_kwargs\": {},\n", + " \"response_metadata\": {},\n", + " \"tool_call_id\": \"tool_call_id\"\n", + " }\n", + " ]\n", + "}\n" + ] + } + ], + "source": [ + "import { AIMessage } from '@langchain/core/messages';\n", + "\n", + "const messageWithSingleToolCall = new AIMessage({\n", + " content: \"\",\n", + " tool_calls: [\n", + " {\n", + " name: \"get_weather\",\n", + " args: { location: \"sf\" },\n", + " id: \"tool_call_id\",\n", + " type: \"tool_call\",\n", + " }\n", + " ]\n", + "})\n", + "\n", + "await toolNode.invoke({ messages: [messageWithSingleToolCall] })" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note that typically you don't need to create `AIMessage` manually, and it will be automatically generated by any LangChain chat model that supports tool calling.\n", + "\n", + "You can also do parallel tool calling using `ToolNode` if you pass multiple tool calls to `AIMessage`'s `tool_calls` parameter:" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\n", + " messages: [\n", + " ToolMessage {\n", + " \"content\": \"nyc, sf\",\n", + " \"name\": \"get_coolest_cities\",\n", + " \"additional_kwargs\": {},\n", + " \"response_metadata\": {},\n", + " \"tool_call_id\": \"tool_call_id\"\n", + " },\n", + " ToolMessage {\n", + " \"content\": \"It's 60 degrees and foggy.\",\n", + " \"name\": \"get_weather\",\n", + " \"additional_kwargs\": {},\n", + " \"response_metadata\": {},\n", + " \"tool_call_id\": \"tool_call_id_2\"\n", + " }\n", + " ]\n", + "}\n" + ] + } + ], + "source": [ + "const messageWithMultipleToolCalls = new AIMessage({\n", + " content: \"\",\n", + " tool_calls: [\n", + " {\n", + " name: \"get_coolest_cities\",\n", + " args: {},\n", + " id: \"tool_call_id\",\n", + " type: \"tool_call\",\n", + " },\n", + " {\n", + " name: \"get_weather\",\n", + " args: { location: \"sf\" },\n", + " id: \"tool_call_id_2\",\n", + " type: \"tool_call\",\n", + " }\n", + " ]\n", + "})\n", + "\n", + "await toolNode.invoke({ messages: [messageWithMultipleToolCalls] })" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Using with chat models" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We'll be using a small chat model from Anthropic in our example. To use chat models with tool calling, we need to first ensure that the model is aware of the available tools. 
We do this by calling the `.bindTools` method on the `ChatAnthropic` model:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import { ChatAnthropic } from \"@langchain/anthropic\";\n",
+ "\n",
+ "const modelWithTools = new ChatAnthropic({\n",
+ " model: \"claude-3-haiku-20240307\",\n",
+ " temperature: 0\n",
+ "}).bindTools(tools)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[\n",
+ " {\n",
+ " name: 'get_weather',\n",
+ " args: { location: 'sf' },\n",
+ " id: 'toolu_01DQJwh6WmCYkvCjDAVkX8E6',\n",
+ " type: 'tool_call'\n",
+ " }\n",
+ "]\n"
+ ]
+ }
+ ],
+ "source": [
+ "(await modelWithTools.invoke(\"what's the weather in sf?\")).tool_calls"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "As you can see, the AI message generated by the chat model already has `tool_calls` populated, so we can pass it directly to `ToolNode`:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "{\n",
+ " messages: [\n",
+ " ToolMessage {\n",
+ " \"content\": \"It's 60 degrees and foggy.\",\n",
+ " \"name\": \"get_weather\",\n",
+ " \"additional_kwargs\": {},\n",
+ " \"response_metadata\": {},\n",
+ " \"tool_call_id\": \"toolu_01LQSRLQCcNdnyfWyjvvBeRb\"\n",
+ " }\n",
+ " ]\n",
+ "}\n"
+ ]
+ }
+ ],
+ "source": [
+ "await toolNode.invoke({ messages: [await modelWithTools.invoke(\"what's the weather in sf?\")] })"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## ReAct Agent"
+ ]
+ },
+ {
+ "attachments": {},
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Next, let's see how to use `ToolNode` inside a LangGraph graph. Let's set up a graph implementation of the [ReAct agent](https://langchain-ai.github.io/langgraph/concepts/agentic_concepts/#react-agent). This agent takes a query as input, then repeatedly calls tools until it has enough information to resolve the query. 
We'll be using `ToolNode` and the Anthropic model with tools we just defined" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "import { StateGraph, Annotation, messagesStateReducer, END, START } from \"@langchain/langgraph\";\n", + "import { BaseMessage } from \"@langchain/core/messages\";\n", + "\n", + "const MessagesState = Annotation.Root({\n", + " messages: Annotation({\n", + " reducer: messagesStateReducer,\n", + " })\n", + "})\n", + "\n", + "const toolNodeForGraph = new ToolNode(tools)\n", + "\n", + "const shouldContinue = (state: typeof MessagesState.State): \"tools\" | typeof END => {\n", + " const { messages } = state;\n", + " const lastMessage = messages[messages.length - 1];\n", + " if (\"tool_calls\" in lastMessage && Array.isArray(lastMessage.tool_calls) && lastMessage.tool_calls?.length) {\n", + " return \"tools\";\n", + " }\n", + " return END;\n", + "}\n", + "\n", + "const callModel = async (state: typeof MessagesState.State): Promise> => {\n", + " const { messages } = state;\n", + " const response = await modelWithTools.invoke(messages);\n", + " return { messages: [response] };\n", + "}\n", + "\n", + "\n", + "const workflow = new StateGraph(MessagesState)\n", + " // Define the two nodes we will cycle between\n", + " .addNode(\"agent\", callModel)\n", + " .addNode(\"tools\", toolNodeForGraph)\n", + " .addEdge(START, \"agent\")\n", + " .addConditionalEdges(\n", + " \"agent\",\n", + " shouldContinue,\n", + " )\n", + " .addEdge(\"tools\", \"agent\");\n", + "\n", + "const app = workflow.compile()" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "/9j/4AAQSkZJRgABAQAAAQABAAD/4gHYSUNDX1BST0ZJTEUAAQEAAAHIAAAAAAQwAABtbnRyUkdCIFhZWiAH4AABAAEAAAAAAABhY3NwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAA9tYAAQAAAADTLQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlkZXNjAAAA8AAAACRyWFlaAAABFAAAABRnWFlaAAABKAAAABRiWFlaAAABPAAAABR3dHB0AAABUAAAABRyVFJDAAABZAAAAChnVFJDAAABZAAAAChiVFJDAAABZAAAAChjcHJ0AAABjAAAADxtbHVjAAAAAAAAAAEAAAAMZW5VUwAAAAgAAAAcAHMAUgBHAEJYWVogAAAAAAAAb6IAADj1AAADkFhZWiAAAAAAAABimQAAt4UAABjaWFlaIAAAAAAAACSgAAAPhAAAts9YWVogAAAAAAAA9tYAAQAAAADTLXBhcmEAAAAAAAQAAAACZmYAAPKnAAANWQAAE9AAAApbAAAAAAAAAABtbHVjAAAAAAAAAAEAAAAMZW5VUwAAACAAAAAcAEcAbwBvAGcAbABlACAASQBuAGMALgAgADIAMAAxADb/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/2wBDAQMEBAUEBQkFBQkUDQsNFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBT/wAARCADaAMcDASIAAhEBAxEB/8QAHQABAAMBAAMBAQAAAAAAAAAAAAUGBwgCAwQJAf/EAE8QAAEDBAADAwYIBw0HBQAAAAECAwQABQYRBxIhEzFVCBYiQZTRFBUXMlFhk+EJN0JxdbO0IyQ0NkNSYnN2gaHB0hhUVpGSlbElM0Vyov/EABsBAQACAwEBAAAAAAAAAAAAAAACAwEEBQYH/8QANREAAgECAQgIBQUBAQAAAAAAAAECAxExBBITIUFRUpEFFBVhcaGxwSIyM2LRQnKB4fA0Y//aAAwDAQACEQMRAD8A/VOlKUApSlAK+SbdoNtKBMmx4pX1SH3Uo5vzbNfXWZ5/Cjzs/tSJMdqQkWyQQl1AUAe1a+mjlGEZTlgk2XUaelmoXxLx51WXxiB7Sj3086rL4xA9pR76zvzetfhsP7BHup5vWvw2H9gj3Vye1cn4Jc0dPs77vI0TzqsvjED2lHvp51WXxiB7Sj31nfm9a/DYf2CPdTzetfhsP7BHup2rk/BLmh2d93kaJ51WXxiB7Sj3086rL4xA9pR76zvzetfhsP7BHup5vWvw2H9gj3U7VyfglzQ7O+7yNE86rL4xA9pR76edVl8Yge0o99Z35vWvw2H9gj3U83rX4bD+wR7qdq5PwS5odnfd5GiedVl8Yge0o99eTWS2h91Dbd1hOOLISlCZCCVE9wA3Wc+b1r8Nh/YI91Rl/s1visW91mDGZdTdbfpbbKUqH78Z9YFX0OkKFetCiotZzSxW12IyyDNi5Z2BtdKUrfOQKUpQClKUApSlAKUpQClKUApSlAKznNfxg2v9FyP1rVaNWc5r+MG1/ouR+taqqt9Cp+1m5kn1onjSlK8IenILMs4snD6zi6X+cIENTqI6FBtbq3HVHSUIQgFS1HrpKQT0P0VQMr8pDHsemYQY7U242zJH5DZlsW+WtbCGW3CSGkslal9ogJKNBQHMrWgTUxxztlrueHxhdLb
kE4MT2ZEaRjDCnp0B9IUUSEJTs+j1B9FXztFJBNZeZmcO2LhZmGT2O73WRYr5NMtuLbv/AFBcNxiQwxIcit9UrIU2VoSOm+4dQNulThKN5d+3u1GtUnJOy7vU1jJuOeEYbdmbder0q3yXG23SXYb/AGbSXOiC64G+Rrf9Mpr6cl4w4liWRjH7lcnU3tUduWmBGhSJLqmVqUhKwlptWxtCt6+boE6BG8H41NZRnxzu3ybTm0iPPs7Qxe22pl2PDV2kbbhmKSUjtEulQU08e5ICUqJrQ+HlonO8ZxfH7VOjRXcGtcdMmXFW1yu9u+txklQGnACgqQeo6bFSdKEYKT3b/DuIqpNyzUTnDjjjbeIWX5Tj7cObElWe4uQ2lLhSQ282httSlqcU0lCFcy1AIKuYgBQ2FA1plY9wzfnYjxTz+xXCx3dKb3e1XaFdWoS1wFsqiMpIU+PRQoKZUnlVo7I1vdbDVFVRUvhwsi6m21rFRGTfwOB+lLf+2M1L1EZN/A4H6Ut/7YzW10d/20f3R9UKv05eDNfpSlewPIilKUApSlAKUpQClKUApSlAKUpQCs5zX8YNr/Rcj9a1WjVXMlwaDk8+NNfkzYsmO0plK4b/AGe0qIJB6HfVIrEoqpCUG7XTRfQqKlUU2ZzlfD3GM6VGOR4/bL6YvMGDcIqHuy5tc3LzA63yp3r6BUB/s/cMt78wMb/7Wz/prUvkqg+MXv237qfJVB8Yvftv3VxV0XNKyrep1nltB63EpWLcOMVwd997HcctdjdkJCHV2+IhkuJB2AopA2BVjqS+SqD4xe/bfup8lUHxi9+2/dUX0S5O7qrkySy+ktSTI2lZpxkizcJ4ncI7HbL3dEQMlu78O4B2RzKU2hnnTynXonfrrXfkqg+MXv237qx2P/6rkzPaFLcyvXyxW7JrVItl2gx7nbpAAdiy2g404AQRzJPQ9QD/AHVUEcAeGjZ2nAccSdEbFsZHQjRHzforUPkqg+MXv237qfJVB8Yvftv3VNdFSjqVZcmReXUXjEzi18E+H9juMa4W/CrDBnRlh1mTHtzSHG1juUlQTsEfTU9k38DgfpS3/tjNWn5KoPjF79t+6v6nhRbO3juO3G7SUsPtyEtPS+ZBW2sLTsa6jmSD/dWxk/R7pV6dadW+a08HsdyEstpOLjFWuXWlKV0ziClKUApSlAKUpQClKUApSlAKUpQClKUApSlAKUpQHO/lI/jx8nn+0Mv9mNdEVzv5SP48fJ5/tDL/AGY10RQClKUApSlAKUpQClKUApSlAKUpQClKUApSlAKUpQClKUApSlAKUpQHO/lI/jx8nn+0Mv8AZjXRFc7+Uj+PHyef7Qy/2Y10RQClKUApSlAKUpQClKUApSlAKUpQClKUApSlAKUpQClKUApSoTJMvgYyG0P9rJmvAlmDFTzvOgd5A2AlPcOZRCRsbPUVKMXJ2iZScnZE3Xy3S2Rb1bZdvnx25cGWyuO+w6NodbUkpUlQ9YIJB/PVDczzJZJKmLPboTfXlEmWt1z6thKAAfqCj+evX555d/u1k/6nqt0W+S5m11Ws/wBJ+OnlF8G5fArjBfsRfClxWHu2t76v5eKv0mlb9Z16Kv6SVD1V+r3kU8F3+B/AOz2qehbV6ujirxcWXO9p51CAG9eopbQ2kj+clX01UOLnBg8Zs/wrLL5EtIn4w/2qW2u05ZiAoLQ07sbKErHMB/SWPyumueeeXf7tZP8AqepolxLmOqVtxpVKzdGa5Ykgrh2Zwb+al11HT8/Kf/FS1n4jsvyGot5hKskh1QQ26p0OxnFE6CQ7oaJOgAtKdkgDZ6VjRN/K0/B+2JCWT1YK7iXKlKVSa4pSlAKUpQClKUApSlAKUpQClKUApSlAKUpQEVlF+RjNhl3FbfbKaSEtsg6LrqlBLaAfUVLUlP8AfWew4zqFOyZbpk3CQQuQ+T3n1JT9CE7ISn1D6ySZ7istXwbHGv5J27oDn0aSy8tP/wC0IqKq2fwU4pbdfsl5M7OQwWa57RSudOPPFHJ8ZvGSSMQv1zlOY3ARMnWqJZYz0GOeQualSHVJX6aBvlaPMkddHYr08TuL2QfHOTohZfFwNiy41HvUCPIjMPLuzrqXVFO3QSUJLaG9NgK5l9/cK1bG660VdHSNK5me4ncQslvbGO2hrIYr9msdtk3J61wLdIlOy5LJWQ8JTjSUpHLrTaNlXP1SAAZm0ZRxLyrLsRxq63M4RcZeOS7hc2o0OM+6HmZbbSFo5u0QgrSsKI2sAKI79KCxlVU8Ezf1uJb5eZQTzHlGzrZ+ivF9huSy4y82l1pxJQttYBSpJGiCD3g1yreLtknEqzcHJUzI37ZeGcunWp+XAisacdYRMaTICHELAVytH0fm/uqunROuqIbLkeIw06+uU6hCUrfcSlKnCBoqISAAT39AB16Cs4ayUJ599RO4BfHkSpNgmOrfcjtiREfeXzLcYKtFKiepLatDZ6lKkbJPMau1ZXblqa4gY2pHznBKaXrv7Mtcx/u5kI/wrVK2qmtRnvXu17XODlUFCq0hSlKpNQUpSgFKUoBSlKAUpSgFKUoBSlKAUpSgK7ntjfv2NvtRE80+OtEuMknl5nG1BQRv1BYBQT9CzVNhTG7hFbkNE8ix3KGlJI6FJHqIIII9RBFapWP53fbHbOKNpxa1TixmV7juTfiv4K4uM80gHbzziEkMElJSHDvZ0ClXokWq045jdrYfg38lyhUm4ywZRcu4BWPMLtf5b92vkCLkDSGrtbbfMDUaaUt9mlaxyFYPIEpPKpIUEgKB67zzirwnyQ5JZn8etuR3ddstLEKJdI92tiOR1vm0txqSwSgn0SpbOubp6I5RXQjhvcMlEvGLilQ36cUtPtq+sFK+b/mkH6q9fxhP/wCHL17J99Y6vV2LzR03KjJapGbp4KyciYsl9vWQ3SxZ2m1swbtdMZkIYTNKRtSVpW2pJAUVEKCUkb6aGgLZb+G9vt+V2jIRMuEi4WyzrsjZkvh0OMqW2srcURzKc20n0ubrs7BJ3Xvu2dQ7DcLZAucSXbp1zcLMCLLDbTstY1tLSVLBWRsdE7PUVK/GE/8A4cvXsn306vV3E1Oitq5lEk8BLC/iUWxNXG7QzDvD18h3KM+hEuNKcdccUUK5OXl/dnE8qkn0T12etaBaoKrZbIkNUp+cqOyhoyZSgp14pAHOsgAFR1skAdT3V4ImXJ0hKMbvKlE60phKP8VLAqTtuH3u/KHxmn4it5+eyy8Fy3B/NK07S2PUSkqV1Oik6VTQSXz2S8fbEi61GmrpnswW3qu2TSLwRuFBaXCjq3tLjqlDtlD/AOnIlG/pLg6aO9Eqk8OOJWLZrJv9jx3to7+MSvi2bAehORTHUNhHKFJAKFBJKSn1a7t1dqTkpNJYLD/eZwatR1ZubFKUqsqFKUoBSlKAUpSgFKUoBSlKAUpSgFfwkDvOvz1HT8hgQLgzbFTIxvEllx+LblPoQ/ISjXMUJJ2QNjZ7hsbrNIWEz+OuKY3cOJmPycVuFtupujFit95WpBCFEx/hJb5QpSfRXoHopA
OwCpFAfdcciu3FRzPcMtEfI8GdtyG4jOXLiIShx5Q5l/BkrO1gJ5RzgD550UkJJvWM4+nGrDbLaZsu6uwYrcX4wuKw5KfCQBzOLAHMo62TrqetStKAUpUbklkTkuO3W0LlyoCLhFdiGXCWEPshaCnnbUQQFp3sEggEDoaA/ILyy/KMmcT/ACiF3ewXBTVrxR8RLJIjr/LaXzKkJPdtTg2FfzUo+iv1L8n/AIvQ+OXCWwZfE5W3pjPJMjp/kJKPRdR9OuYEjfekpPrriHi3+D/4e4FxI4V4/b7zkz0PKro/CmuSZUdTjaEM84LRSwADvv5goa9VdreT/wCT/j3k4YbMxrGplznQJU9dxW5dXW3HQ4pttsgFttA5dNJ9W9k9e7QGmUpSgKvxFwCJxIw+6Y/Jn3CzonpRzT7PIMaU0pCgpCkuD1gpHfsEdKhY96ynFM0xXEW8bnZBi7lt7OTmD89tTrMltJ/99s+krnCUnnH5S+6tCpQEXjmUWfMLYLjY7pDu8ArU18JhPJdb50nSk7SSNg9CKlKzDKOFdxx3DrhE4Pu2TAb3LuKbk8tdtS5GlL6BaFpTrk5wlIKkgkAHQBOxLw+LFrVxTc4dyY9xayBu2puSJSoDiIcpvYDhac6j0CUbBOgVgAkg6AvFKUoBSlKAUpSgFKUoBSlKAVnWY57OvreX4rw6uFqd4i2VqMXI14Q6iPFD/pIcUQn0/wBz5lDl2NgA67q0Ws0uU5GN8d7NGhYIqQvJbe/8YZfGbJ+D/BgC2w8Qg6Srm9EqWOvQA+oCdtPDazoyO35fdrXbZudtW1u3v3xmNyKIAJX2YJVyJKlL9ZOiEkkCrdSlAKUpQClKw7jX5Q0jGsgZ4fcPLajLuKE9vmbgJV+9rW2dfviYsfMSNghOwVbHdzJ2BX/KPuURzyhvJ6tiJLS7im9S5KoiVguhr4OR2hT3hOwRvu6H6DXSNY7wL8nmPwxkzcoyO5Ly/iVeBzXTJJY2ob/kI6f5JlOgAABvQ3oBKU7FQClKUApSlAK9E2G3cIj8Z3nDbzamlFpam1hKho8qkkFJ+sEEeqvfSgMgj4vkPAPBccsOAWeZncBq59lKbvV51KjRHFHRaWtPKUtcydJ6aQg95JUNStV7t19aedts+LcGmXVMOLivJdShxPRSFFJOlD1g9RX21lPk6ysJl41kqsFhzYUBORz0TkTiSpc4LHbqTtSvQJ1ru/MKA1alKUApSlAKUpQClKUAr8+/KJ/CQXPF81h47jGK3exSrHdWlXtu9LjIclIbWsPQwlAeSlCwGyH0Ob79JI0T+gD8hqK2XHnUNIHepxQSP+Zri7y8vJnsfGSyO5ticy3jOLaz++IzUhG7pHSPmaB6upA9E96h6J36OpKMpYIFz8hzym808pa35fOyq1We3RbU7FZhOWlh1sOrWHS6F9o6vfKEta1r5x7/AFdRVyl+Dyx6Dw48nC3qucli23O9TZFzfjS3EtuoBIab2lWiAUNJWPqXv110z51WXxiB7Sj31LRz4WZsyUpXzQ7nDuG/gstiTrqexcC//BrmfJuIuVeVFkU/C+F8yRj2BQnVRb/nqElLj6h0XFt++9XqLvq3sdOXng01qZgluJfHTIc/zGXww4MdjLyBj0L5lrqe0gWBB2CAe52R0OkDYBHXelcui8FOBePcD7A/EtfbXG8T1/CLrfp6u0mXF87JcdWeutk6TvQ2e8kkzfDLhfjfCDEYmN4rbW7bbI/UhPVx5Z+c44vvWs66k/UBoAAWusAUpSgFKUoBSvkm3aDbSgTJseKV9Uh91KOb82zXzedVl8Yge0o99TUJNXSM2ZKUqL86rL4xA9pR76edVl8Yge0o99Z0c+FizMY8rPyn5vkv2XH7s3hispt9zkOxXnxcfgiYriUpU2k/uTnMVjtCO7XZnv3XO/Cf8Jffs3yy24rC4UQpd2vFx7GMIV3VHQhK1DRcBYXspGypewNAnQ1XV/HnD8Y438KMhw+Zd7ahc6OTEkLko/e8lPpNOdDvQUBvXekqHrrjn8G5wOZxbJ8izzLixbbhbXXLPbI0x1CFJc7pDwBPqGmwobB5nB6qaOfCxZn6NUqL86rL4xA9pR76edVl8Yge0o99NHPhYsyUpUX51WXxiB7Sj30TlFmUoAXeCSegAko6/wCNNHPhYsyUpSlVmBVQy7Ln4ksWm0hBuBSFvyXBzNxEHu6flOK/JT3AAqV05Urtch9EWO684dNtpK1H6gNmshxpbku1N3F/Rl3I/DX1DfVSwCB19SU8qR9SRVsbRi6j2YeJu5LRVWfxYI/i8agy3u3uLZvEsjRk3HTyz130BHKkfUkAfVXu837WP/jYf2CPdVO4wcXYnCOJj78qHImC63Vi3nsGHnS0hSvTc02hZUoDuR0Kj3b0RX0ZFxsw3FI1sdul0djKuUb4ZHjCBJXJ7HptxbKWy42kb6laU6OwdEGq3WqSxkzuJwjq1KxafN+1+Gw/sE+6nm/a/DYf2CfdVdv/ABgw/G7PaLnMvbS4l4Tz24wmnJTktPLzFTbbSVLUACCSBobG9VDSuLzFyyjhszjkiFdLBlLs5DkwBRUAxHW4OTqOVXOjlUFAkaI0DUdJPiZlyii7PYrZ3lBZtsZDqSFJdabDbiSO4hSdEf3GpbGr+7hfZw5jhkWNbhAkKSO1irWvZU4ofPbKlElZ9JJJUoqBKkU2wcXMTyjJ5WP2q6mbc4y3W3EojPBrmbOnEpeKOzUUnoQlRIq3PMokMradQlxtaSlSFDYUD0INWRrSwm7r/YbiqpShWjY1ClVPhjcHJmKNx33C6/b3nYKlkklSW1ENkk9SS3yEk+vff31bKTjmScdx5yUXFuLFKUqBEUpSgMzz+FHnZ/akSY7UhItkghLqAoA9q19NfH5vWvw2H9gj3VJZr+MG1/ouR+tarxrn5fUnGcUm1qXqzxfS0pLKWk9iI/zetfhsP7BHup5vWvw2H9gj3VIVGZLk1rw+ySrvepzVutsYAuyHjoDZAAHrJJIAA2SSAASa5ulqP9T5nHU5t2TZ5+b1r8Nh/YI91PN61+Gw/sEe6qjD474LNsV2vCb8lmFaezM/4VFeYdjJcUEoUtpxCXAlRPRXLroevQ1IYrxYxXNJc+Larr2kmCymS+1JjuxlBlW+V1IdSnnbOj6adp+us59ZbX5ljVZJtp6vEnvN61+Gw/sEe6nm9a/DYf2CPdWVRvKOsmUcTsLxvFJ0e6wruuaJj64j6PQZYUtCmHFBKFpK06Kk8419HfWy0lOrHGT5mJqrTtn3VyP83rX4bD+wR7qhc0sluYxa4uNQIrbiW9pWhlIIOx3HVWqoLOf4pXP+q/zFbOSVajyims5/MtveWZPOWmhr2r1NlpSldg+inzXKILhbpUUnQfaU3v6Ngj/OslxVxS8btoWlSHW2EsuIUNFK0DlWD+ZSSK2Os6yqwu45cZN1iMKetUtZdmNtDa4zpABdCfW2rXpa6pV6WiFKKLorPg6axxX4/wBusdDI6qpzaltMm8oK23GTjmOXK322Xd/iTI7fdZMSA2XZC2GnP3Qto
HVagFb5R1OjVWVkcvFeK9yzl7E8mulnyGxxY0X4HaXHZcR1h17mYdY1ztBfaJUCoBOwdkVusaSzMYQ/HdQ+y4OZDjagpKh9II6GvZWq9WpnYcLvOTOWuHWJZDwZl4JkV9xy53GKmx3C3yIVmjGa9anX53wttPZo2op5D2RUgHRQN6FeeM4pkmP3vC8vmY1c24UjMLxc3bZHY7STb485lbbKnW0np6Wlr1vl5zvuNdRUrFyCopWs8P6/BgOAfGti4xfFuK2fJrbh0uRPfvUG+wC3CivbKkPwnj1IdcJJbSpSdKJ0kjVb9SvlhxnswkLt9scIjBXJMuKN8jKd6UhtQ6F0jYAHzPnK/JSuyEHUfdte4k3GjFuT1Fk4URyMdlzNEJn3CRIRsaJQFdmk/mIbBH1EVdK9EKGxbobESM0liMw2lpppA0lCEjQA+oACvfVtSWfNyR5ucs+TlvFKUqsgKUpQGc5r+MG1/ouR+tarxryzX8YNr/Rcj9a1Vcyvh7jGdKinI8ftt9MXmDBuEVD3Zc2ubl5gdb5U719ArmdIW0kb7l7niulbda17kWGsj8pfErrlWGWR61xJ1y+Jr7EusuBa5CmJcmO3zhxLK0qSQ4OcLTpQJKBo71U5/s+8Mt/xAxv/ALWz/pqdxXhviuDPvvY7jlrsbshIQ6u3xEMlxIOwFFIGwK5yai7o5kJRpyU4t3Xd/Zz3mWE23JeFmd3PHMZzpd9ehxbeheTKnPyZLQkodLbLT61r0ggknlA6nW+tWjjXgN/zTiDkEazxZCPjDh7cLa1N5FJYMhUlooZU5rlClDm6E70VHu3W/wBKlpWixZTJNNbL46934OdLJe5uYcRODoj4RkePR7C1OanfGFqcYjxCYRbSgOa5VJ5hpKh6J6ddnVdF181xt0W8W+TBnR2pkKS2pl+O+gLQ6hQ0pKknoQQSCDVJHk/8MwQRgGOAjuItjP8ApqLkpY6iE5wqWvqt/O1vf3l/qCzn+KVz/qv8xUBG4DcN4chp9jBMdZfaUFtuItjIUlQOwQeXoQan85/ilc/6r/MVsZJbrNO3EvUzQUdNDNe1evibLSlK7h9GFKUoCr3PhvYbnJckiM7BkuHa3bfIcjlZ3slQQQFHfrIJr4PkogeL3r237qu9KvVeov1FiqzjqUmUj5KIHi969t+6nyUQPF717b91XelZ09Tf6EtNU4mU5nhVYwoGUu4XJIIPZy5zimzr6UAhJ/MQRVriRGIEZuPGZbjx2khKGmkBKEAdwAHQCvdSq5VJz1SZXKUpfM7ilKVWRFKUoBSlKArmS4NByefGmvyZsWTHaUylcN/s9pUQSD0O+qRUZ8lUHxi9+2/dV2pVmklZL2RXKnCTvKKf8FJ+SqD4xe/bfup8lUHxi9+2/dV2pTSPu5IjoaXAuSKT8lUHxi9+2/dT5KoPjF79t+6rtSmkfdyQ0NLgXJFJ+SqD4xe/bfup8lUHxi9+2/dV2pTSPu5IaGlwLkik/JVB8Yvftv3V65HCG1y2lNSLneH2VfObXM2lQ+g9KvVKyqsk7r0RlUaSd1FckKUpVRaf/9k=" + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import * as tslab from \"tslab\";\n", + "\n", + "const drawableGraph = app.getGraph();\n", + "const image = await drawableGraph.drawMermaidPng();\n", + "const arrayBuffer = await image.arrayBuffer();\n", + "\n", + "await tslab.display.png(new Uint8Array(arrayBuffer));" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's try it out!" 
+ ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\n", + " type: 'human',\n", + " content: \"what's the weather in sf?\",\n", + " toolCalls: undefined\n", + "}\n", + "{\n", + " type: 'ai',\n", + " content: [\n", + " { type: 'text', text: \"Okay, let's check the weather in SF:\" },\n", + " {\n", + " type: 'tool_use',\n", + " id: 'toolu_01Adr6WYEuUuzShyDzwYZf5a',\n", + " name: 'get_weather',\n", + " input: { location: 'sf' }\n", + " }\n", + " ],\n", + " toolCalls: [\n", + " {\n", + " name: 'get_weather',\n", + " args: { location: 'sf' },\n", + " id: 'toolu_01Adr6WYEuUuzShyDzwYZf5a',\n", + " type: 'tool_call'\n", + " }\n", + " ]\n", + "}\n", + "{\n", + " type: 'tool',\n", + " content: \"It's 60 degrees and foggy.\",\n", + " toolCalls: undefined\n", + "}\n", + "{\n", + " type: 'ai',\n", + " content: 'The current weather in San Francisco is 60 degrees and foggy.',\n", + " toolCalls: []\n", + "}\n" + ] + } + ], + "source": [ + "import { HumanMessage } from \"@langchain/core/messages\";\n", + "\n", + "// example with a single tool call\n", + "const stream = await app.stream(\n", + " {\n", + " messages: [new HumanMessage(\"what's the weather in sf?\")],\n", + " },\n", + " {\n", + " streamMode: \"values\"\n", + " }\n", + ")\n", + "for await (const chunk of stream) {\n", + " const lastMessage = chunk.messages[chunk.messages.length - 1];\n", + " const type = lastMessage._getType();\n", + " const content = lastMessage.content;\n", + " const toolCalls = lastMessage.tool_calls;\n", + " console.dir({\n", + " type,\n", + " content,\n", + " toolCalls\n", + " }, { depth: null });\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\n", + " type: 'human',\n", + " content: \"what's the weather in the coolest cities?\",\n", + " toolCalls: undefined\n", + "}\n", + "{\n", + " type: 'ai',\n", + " content: [\n", + " {\n", + " type: 'text',\n", + " text: \"Okay, let's find out the weather in the coolest cities:\"\n", + " },\n", + " {\n", + " type: 'tool_use',\n", + " id: 'toolu_01Qh1jhQHH14ykNEx5oLXApL',\n", + " name: 'get_coolest_cities',\n", + " input: { noOp: 'dummy' }\n", + " }\n", + " ],\n", + " toolCalls: [\n", + " {\n", + " name: 'get_coolest_cities',\n", + " args: { noOp: 'dummy' },\n", + " id: 'toolu_01Qh1jhQHH14ykNEx5oLXApL',\n", + " type: 'tool_call'\n", + " }\n", + " ]\n", + "}\n", + "{ type: 'tool', content: 'nyc, sf', toolCalls: undefined }\n", + "{\n", + " type: 'ai',\n", + " content: [\n", + " {\n", + " type: 'text',\n", + " text: \"Now let's get the weather for those cities:\"\n", + " },\n", + " {\n", + " type: 'tool_use',\n", + " id: 'toolu_01TWgf1ezxk1hAzwYtqrE3cA',\n", + " name: 'get_weather',\n", + " input: { location: 'nyc' }\n", + " }\n", + " ],\n", + " toolCalls: [\n", + " {\n", + " name: 'get_weather',\n", + " args: { location: 'nyc' },\n", + " id: 'toolu_01TWgf1ezxk1hAzwYtqrE3cA',\n", + " type: 'tool_call'\n", + " }\n", + " ]\n", + "}\n", + "{\n", + " type: 'tool',\n", + " content: \"It's 90 degrees and sunny.\",\n", + " toolCalls: undefined\n", + "}\n", + "{\n", + " type: 'ai',\n", + " content: [\n", + " {\n", + " type: 'tool_use',\n", + " id: 'toolu_01NyRcucFHEZmyA6hE6BtTPs',\n", + " name: 'get_weather',\n", + " input: { location: 'sf' }\n", + " }\n", + " ],\n", + " toolCalls: [\n", + " {\n", + " name: 'get_weather',\n", + " args: { location: 'sf' },\n", + " id: 
'toolu_01NyRcucFHEZmyA6hE6BtTPs',\n",
+ " type: 'tool_call'\n",
+ " }\n",
+ " ]\n",
+ "}\n",
+ "{\n",
+ " type: 'tool',\n",
+ " content: \"It's 60 degrees and foggy.\",\n",
+ " toolCalls: undefined\n",
+ "}\n",
+ "{\n",
+ " type: 'ai',\n",
+ " content: 'Based on the results, the weather in the coolest cities is:\\n' +\n",
+ " '- New York City: 90 degrees and sunny\\n' +\n",
+ " '- San Francisco: 60 degrees and foggy\\n' +\n",
+ " '\\n' +\n",
+ " 'So the weather in the coolest cities is a mix of warm and cool temperatures.',\n",
+ " toolCalls: []\n",
+ "}\n"
+ ]
+ }
+ ],
+ "source": [
+ "// example with multiple tool calls in succession\n",
+ "const streamWithMultiToolCalls = await app.stream(\n",
+ " {\n",
+ " messages: [new HumanMessage(\"what's the weather in the coolest cities?\")],\n",
+ " },\n",
+ " {\n",
+ " streamMode: \"values\"\n",
+ " }\n",
+ ")\n",
+ "for await (const chunk of streamWithMultiToolCalls) {\n",
+ " const lastMessage = chunk.messages[chunk.messages.length - 1];\n",
+ " const type = lastMessage._getType();\n",
+ " const content = lastMessage.content;\n",
+ " const toolCalls = lastMessage.tool_calls;\n",
+ " console.dir({\n",
+ " type,\n",
+ " content,\n",
+ " toolCalls\n",
+ " }, { depth: null });\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "`ToolNode` can also handle errors during tool execution. See our guide on handling errors in `ToolNode` [here](https://langchain-ai.github.io/langgraphjs/how-tos/tool-calling-errors/)."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "TypeScript",
+ "language": "typescript",
+ "name": "tslab"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "mode": "typescript",
+ "name": "javascript",
+ "typescript": true
+ },
+ "file_extension": ".ts",
+ "mimetype": "text/typescript",
+ "name": "typescript",
+ "version": "3.7.2"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}