From 3581dce4c6688210ef91a889f2f70c81224a5242 Mon Sep 17 00:00:00 2001
From: William Fu-Hinthorn <13333726+hinthornw@users.noreply.github.com>
Date: Mon, 2 Dec 2024 17:57:43 -0800
Subject: [PATCH] Use indexing

---
 .gitignore                | 1 +
 langgraph.json            | 8 +++++++-
 pyproject.toml            | 2 +-
 src/memory_agent/graph.py | 4 +++-
 src/memory_agent/tools.py | 2 +-
 5 files changed, 13 insertions(+), 4 deletions(-)

diff --git a/.gitignore b/.gitignore
index b8af3ff..bfc4a1b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,6 +5,7 @@ __pycache__/
 
 # C extensions
 *.so
+.langgraph_api
 
 # Distribution / packaging
 .Python
diff --git a/langgraph.json b/langgraph.json
index baefb19..9fcf5d3 100644
--- a/langgraph.json
+++ b/langgraph.json
@@ -5,5 +5,11 @@
   },
   "env": ".env",
   "python_version": "3.11",
-  "dependencies": ["."]
+  "dependencies": ["."],
+  "store": {
+    "index": {
+      "dims": 1536,
+      "embed": "openai:text-embedding-3-small"
+    }
+  }
 }
diff --git a/pyproject.toml b/pyproject.toml
index 5a4f9a0..bae4c81 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,7 +13,7 @@ dependencies = [
     # Optional (for selecting different models)
     "langchain-openai>=0.2.1",
     "langchain-anthropic>=0.2.1",
-    "langchain>=0.3.1",
+    "langchain>=0.3.8",
     "langchain-core>=0.3.8",
     "python-dotenv>=1.0.1",
     "langgraph-sdk>=0.1.32",
diff --git a/src/memory_agent/graph.py b/src/memory_agent/graph.py
index ce134e4..8d96254 100644
--- a/src/memory_agent/graph.py
+++ b/src/memory_agent/graph.py
@@ -24,7 +24,9 @@ async def call_model(state: State, config: RunnableConfig, *, store: BaseStore)
 
     # Retrieve the most recent memories for context
     memories = await store.asearch(
-        ("memories", configurable.user_id), limit=10
+        ("memories", configurable.user_id),
+        query=str([m.content for m in state.messages[-3:]]),
+        limit=10,
     )
 
     # Format memories for inclusion in the prompt
diff --git a/src/memory_agent/tools.py b/src/memory_agent/tools.py
index a8667f6..f343d9d 100644
--- a/src/memory_agent/tools.py
+++ b/src/memory_agent/tools.py
@@ -40,4 +40,4 @@ async def upsert_memory(
         key=str(mem_id),
         value={"content": content, "context": context},
     )
-    return f"Stored memory {memory_id}"
+    return f"Stored memory {mem_id}"
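
Note (not part of the patch): a minimal, hypothetical sketch of what the new
"store.index" settings and the added query= argument enable, for anyone trying
the change locally. It assumes langchain-openai is installed and OPENAI_API_KEY
is set; the "user-123" namespace and the sample memories are made up for
illustration.

    import asyncio

    from langchain_openai import OpenAIEmbeddings
    from langgraph.store.memory import InMemoryStore

    async def main() -> None:
        # Mirror the index settings added to langgraph.json.
        store = InMemoryStore(
            index={
                "dims": 1536,
                "embed": OpenAIEmbeddings(model="text-embedding-3-small"),
            }
        )
        ns = ("memories", "user-123")  # hypothetical user id
        await store.aput(ns, "1", {"content": "Prefers dark roast coffee", "context": "small talk"})
        await store.aput(ns, "2", {"content": "Is planning a trip to Japan", "context": "travel chat"})

        # Semantic search over the indexed values, analogous to the new
        # query= argument passed to store.asearch in graph.py.
        hits = await store.asearch(ns, query="what coffee does the user like?", limit=10)
        for item in hits:
            print(item.key, item.value["content"])

    asyncio.run(main())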