updated breakpoints doc
bracesproul committed Aug 19, 2024
1 parent a1440b7 commit 3cdee7b
Showing 1 changed file with 56 additions and 63 deletions.
119 changes: 56 additions & 63 deletions examples/how-tos/breakpoints.ipynb
@@ -65,61 +65,56 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 5,
"id": "9b53f191-1e86-4881-a667-d46a3d66958b",
"metadata": {},
"outputs": [],
"source": [
"import { StateGraph, START, END } from \"@langchain/langgraph\";\n",
"import { StateGraph, START, END, Annotation } from \"@langchain/langgraph\";\n",
"import { MemorySaver } from \"@langchain/langgraph\";\n",
"\n",
"type GraphState = {\n",
" input: string;\n",
"}\n",
"const GraphState = Annotation.Root({\n",
" input: Annotation<string>\n",
"});\n",
"\n",
"const step1 = (state: GraphState): Partial<GraphState> => {\n",
" console.log(\"---Step 1---\");\n",
" return state;\n",
"const step1 = (state: typeof GraphState.State) => {\n",
" console.log(\"---Step 1---\");\n",
" return state;\n",
"}\n",
"\n",
"const step2 = (state: GraphState): Partial<GraphState> => {\n",
" console.log(\"---Step 2---\");\n",
" return state;\n",
"const step2 = (state: typeof GraphState.State) => {\n",
" console.log(\"---Step 2---\");\n",
" return state;\n",
"}\n",
"\n",
"const step3 = (state: GraphState): Partial<GraphState> => {\n",
" console.log(\"---Step 3---\");\n",
" return state;\n",
"const step3 = (state: typeof GraphState.State) => {\n",
" console.log(\"---Step 3---\");\n",
" return state;\n",
"}\n",
"\n",
"\n",
"const builder = new StateGraph<GraphState>({\n",
" channels: {\n",
" input: null,\n",
" }\n",
"})\n",
" .addNode(\"step1\", step1)\n",
" .addNode(\"step2\", step2)\n",
" .addNode(\"step3\", step3)\n",
" .addEdge(START, \"step1\")\n",
" .addEdge(\"step1\", \"step2\")\n",
" .addEdge(\"step2\", \"step3\")\n",
" .addEdge(\"step3\", END);\n",
"const builder = new StateGraph(GraphState)\n",
" .addNode(\"step1\", step1)\n",
" .addNode(\"step2\", step2)\n",
" .addNode(\"step3\", step3)\n",
" .addEdge(START, \"step1\")\n",
" .addEdge(\"step1\", \"step2\")\n",
" .addEdge(\"step2\", \"step3\")\n",
" .addEdge(\"step3\", END);\n",
"\n",
"\n",
"// Set up memory\n",
"const memory = new MemorySaver()\n",
"const graphStateMemory = new MemorySaver()\n",
"\n",
"// Add \n",
"const graph = builder.compile({\n",
" checkpointer: memory,\n",
" interruptBefore: [\"step3\"]\n",
" checkpointer: graphStateMemory,\n",
" interruptBefore: [\"step3\"]\n",
"});"
]
},
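Because the diff interleaves the removed channel-based state definition with the added Annotation-based one, here is the updated cell consolidated into one readable piece (a sketch assembled from the added lines above, with the two @langchain/langgraph imports folded into one):

import { StateGraph, START, END, Annotation, MemorySaver } from "@langchain/langgraph";

// The graph state is a single string channel.
const GraphState = Annotation.Root({
  input: Annotation<string>,
});

// Each step logs its name and passes the state through unchanged.
const step1 = (state: typeof GraphState.State) => {
  console.log("---Step 1---");
  return state;
};

const step2 = (state: typeof GraphState.State) => {
  console.log("---Step 2---");
  return state;
};

const step3 = (state: typeof GraphState.State) => {
  console.log("---Step 3---");
  return state;
};

const builder = new StateGraph(GraphState)
  .addNode("step1", step1)
  .addNode("step2", step2)
  .addNode("step3", step3)
  .addEdge(START, "step1")
  .addEdge("step1", "step2")
  .addEdge("step2", "step3")
  .addEdge("step3", END);

// The checkpointer is what makes pausing and resuming possible;
// interruptBefore tells the compiled graph to stop before "step3" runs.
const graphStateMemory = new MemorySaver();

const graph = builder.compile({
  checkpointer: graphStateMemory,
  interruptBefore: ["step3"],
});
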
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 6,
"id": "c2dd360b",
"metadata": {},
"outputs": [
@@ -134,11 +129,11 @@
"source": [
"import * as tslab from \"tslab\";\n",
"\n",
"const drawableGraph = graph.getGraph();\n",
"const image = await drawableGraph.drawMermaidPng();\n",
"const arrayBuffer = await image.arrayBuffer();\n",
"const drawableGraphGraphState = graph.getGraph();\n",
"const graphStateImage = await drawableGraphGraphState.drawMermaidPng();\n",
"const graphStateArrayBuffer = await graphStateImage.arrayBuffer();\n",
"\n",
"await tslab.display.png(new Uint8Array(arrayBuffer));"
"await tslab.display.png(new Uint8Array(graphStateArrayBuffer));"
]
},
{
@@ -155,7 +150,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 7,
"id": "dfe04a7f-988e-4a36-8ce8-2c49fab0130a",
"metadata": {},
"outputs": [
@@ -179,20 +174,20 @@
"const initialInput = { input: \"hello world\" };\n",
"\n",
"// Thread\n",
"const config = { configurable: { thread_id: \"1\" }, streamMode: \"values\" as const };\n",
"const graphStateConfig = { configurable: { thread_id: \"1\" }, streamMode: \"values\" as const };\n",
"\n",
"// Run the graph until the first interruption\n",
"for await (const event of await graph.stream(initialInput, config)) {\n",
" console.log(`--- ${event.input} ---`);\n",
"for await (const event of await graph.stream(initialInput, graphStateConfig)) {\n",
" console.log(`--- ${event.input} ---`);\n",
"}\n",
"\n",
"// Will log when the graph is interrupted, after step 2.\n",
"console.log(\"---GRAPH INTERRUPTED---\");\n",
"\n",
"// If approved, continue the graph execution. We must pass `null` as\n",
"// the input here, or the graph will\n",
"for await (const event of await graph.stream(null, config)) {\n",
" console.log(`--- ${event.input} ---`);\n",
"for await (const event of await graph.stream(null, graphStateConfig)) {\n",
" console.log(`--- ${event.input} ---`);\n",
"}\n"
]
},
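Between the two stream() loops above, the run is parked at the breakpoint. Although the cell does not show it, the checkpointer also lets you confirm where the thread is paused before approving the next step; a small sketch using the graph and graphStateConfig defined earlier:

// After the first stream() loop exits at the breakpoint:
const snapshot = await graph.getState(graphStateConfig);
console.log(snapshot.next);   // expected: ["step3"], the node that will run on resume
console.log(snapshot.values); // expected: { input: "hello world" }

Resuming with stream(null, graphStateConfig) then picks up from this checkpoint and executes "step3".
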
@@ -221,16 +216,18 @@
"import { ChatAnthropic } from \"@langchain/anthropic\";\n",
"import { tool } from \"@langchain/core/tools\";\n",
"import { StateGraph, START, END } from \"@langchain/langgraph\";\n",
"import { MemorySaver } from \"@langchain/langgraph\";\n",
"import { MemorySaver, Annotation } from \"@langchain/langgraph\";\n",
"import { ToolNode } from \"@langchain/langgraph/prebuilt\";\n",
"import { BaseMessage, AIMessage } from \"@langchain/core/messages\";\n",
"import { z } from \"zod\";\n",
"\n",
"interface MessagesState {\n",
" messages: BaseMessage[];\n",
"}\n",
"const AgentState = Annotation.Root({\n",
" messages: Annotation<BaseMessage[]>({\n",
" reducer: (x, y) => x.concat(y),\n",
" }),\n",
"});\n",
"\n",
"const search = tool((input) => {\n",
"const search = tool((_) => {\n",
" return \"It's sunny in San Francisco, but you better look out if you're a Gemini 😈.\";\n",
"}, {\n",
" name: \"search\",\n",
@@ -239,7 +236,7 @@
"})\n",
"\n",
"const tools = [search]\n",
"const toolNode = new ToolNode<MessagesState>(tools)\n",
"const toolNode = new ToolNode<typeof AgentState.State>(tools)\n",
"\n",
"// Set up the model\n",
"const model = new ChatAnthropic({ model: \"claude-3-5-sonnet-20240620\" })\n",
@@ -249,7 +246,7 @@
"// Define nodes and conditional edges\n",
"\n",
"// Define the function that determines whether to continue or not\n",
"function shouldContinue(state: MessagesState): \"action\" | typeof END {\n",
"function shouldContinue(state: typeof AgentState.State): \"action\" | typeof END {\n",
" const lastMessage = state.messages[state.messages.length - 1];\n",
" // If there is no function call, then we finish\n",
" if (lastMessage && !(lastMessage as AIMessage).tool_calls?.length) {\n",
@@ -260,21 +257,15 @@
"}\n",
"\n",
"// Define the function that calls the model\n",
"async function callModel(state: MessagesState): Promise<Partial<MessagesState>> {\n",
"async function callModel(state: typeof AgentState.State): Promise<Partial<typeof AgentState.State>> {\n",
" const messages = state.messages;\n",
" const response = await modelWithTools.invoke(messages);\n",
" // We return an object with a messages property, because this will get added to the existing list\n",
" return { messages: [response] };\n",
"}\n",
"\n",
"// Define a new graph\n",
"const workflow = new StateGraph<MessagesState>({\n",
" channels: {\n",
" messages: {\n",
" reducer: (a: any, b: any) => a.concat(b)\n",
" },\n",
" }\n",
"})\n",
"const workflow = new StateGraph(AgentState)\n",
" // Define the two nodes we will cycle between\n",
" .addNode(\"agent\", callModel)\n",
" .addNode(\"action\", toolNode)\n",
@@ -308,7 +299,7 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 2,
"id": "4476aef1",
"metadata": {},
"outputs": [
@@ -344,7 +335,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 3,
"id": "cfd140f0-a5a6-4697-8115-322242f197b5",
"metadata": {},
"outputs": [
@@ -362,7 +353,7 @@
" },\n",
" {\n",
" type: 'tool_use',\n",
" id: 'toolu_01Mo7noa5MEewbwAKyFDK8mg',\n",
" id: 'toolu_018UxZU1fXTwq9sndFcYY19z',\n",
" name: 'search',\n",
" input: { input: 'current weather in San Francisco' }\n",
" }\n",
@@ -403,7 +394,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 4,
"id": "51923913-20f7-4ee1-b9ba-d01f5fb2869b",
"metadata": {},
"outputs": [
@@ -416,11 +407,13 @@
"================================ ai Message (1) =================================\n",
"Based on the search results, I can provide you with information about the current weather in San Francisco:\n",
"\n",
"The weather in San Francisco is currently sunny. This means it's a clear day with plenty of sunshine. It's a great day to be outdoors or enjoy activities in the city.\n",
"The weather in San Francisco is currently sunny. This means it's a clear day with good visibility and likely comfortable temperatures. \n",
"\n",
"However, the search result also includes an unusual comment about Geminis, which doesn't seem directly related to the weather. This appears to be some kind of joke or astrological reference that was included in the search results, but it's not relevant to the actual weather conditions.\n",
"\n",
"However, the search result also includes an unusual comment about Geminis. This appears to be unrelated to the weather and might be a joke or reference from the source. For accurate and detailed weather information, it would be best to check a reliable weather service or website.\n",
"To summarize: The current weather in San Francisco is sunny, which generally means good conditions for outdoor activities. If you're planning to go out, it would be a good idea to wear sunscreen and perhaps sunglasses to protect yourself from the sun's rays.\n",
"\n",
"Is there anything else you'd like to know about the weather in San Francisco or any other information you need?\n"
"Is there any other specific information about the weather in San Francisco that you'd like to know? I can search for more details such as temperature, humidity, or forecast if you're interested.\n"
]
}
],
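The hunks that compile and run the agent are collapsed above, but they follow the same breakpoint pattern as the first example. A sketch of that human-in-the-loop flow, assuming the compiled agent graph is named app, was compiled with a checkpointer and interruptBefore: ["action"], and that the thread id and input text here are illustrative:

import { HumanMessage } from "@langchain/core/messages";

const config = { configurable: { thread_id: "3" }, streamMode: "values" as const };
const inputs = { messages: [new HumanMessage("search for the weather in sf now")] };

// Run until the breakpoint before the "action" (tool) node.
for await (const event of await app.stream(inputs, config)) {
  const recent = event.messages[event.messages.length - 1];
  console.log(recent.content);
}

// Inspect the pending tool call before approving it.
const paused = await app.getState(config);
console.log(paused.next); // expected: ["action"]

// Approve: resume from the checkpoint by passing null as the input,
// so the graph continues instead of starting a new run.
for await (const event of await app.stream(null, config)) {
  const recent = event.messages[event.messages.length - 1];
  console.log(recent.content);
}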
