Skip to content

Commit

Permalink
updates to tools.py and pyproject.toml
Browse files Browse the repository at this point in the history
  • Loading branch information
srdas committed Sep 18, 2024
1 parent 7541886 commit 789953b
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 29 deletions.
2 changes: 1 addition & 1 deletion packages/jupyter-ai-magics/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ dependencies = [
"typing_extensions>=4.5.0",
"click~=8.0",
"jsonpath-ng>=1.5.3,<2",
"langgraph",
]

[project.optional-dependencies]
Expand All @@ -49,7 +50,6 @@ all = [
"langchain_nvidia_ai_endpoints",
"langchain_openai",
"langchain_ollama",
"langgraph",
"pillow",
"boto3",
"qianfan",
Expand Down
42 changes: 14 additions & 28 deletions packages/jupyter-ai/jupyter_ai/chat_handlers/tools.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import argparse
import ast
import math
# import math

# LangGraph imports for using tools
import os
Expand All @@ -12,17 +12,17 @@
from jupyter_ai_magics.providers import BaseProvider
from langchain.chains import ConversationalRetrievalChain, LLMChain
from langchain.memory import ConversationBufferWindowMemory
from langchain_anthropic import ChatAnthropic
# from langchain_anthropic import ChatAnthropic

# Chat Providers (add more as needed)
from langchain_aws import ChatBedrock
from langchain_cohere import ChatCohere
# from langchain_aws import ChatBedrock
# from langchain_cohere import ChatCohere
from langchain_core.messages import AIMessage
from langchain_core.prompts import PromptTemplate
from langchain_core.tools import tool
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_ollama import ChatOllama
from langchain_openai import AzureChatOpenAI, ChatOpenAI
# from langchain_google_genai import ChatGoogleGenerativeAI
# from langchain_ollama import ChatOllama
# from langchain_openai import AzureChatOpenAI, ChatOpenAI
from langgraph.graph import MessagesState, StateGraph
from langgraph.prebuilt import ToolNode

Expand Down Expand Up @@ -95,12 +95,6 @@ def create_llm_chain(
llm=self.llm, prompt=CONDENSE_PROMPT, memory=memory, verbose=False
)

# #### TOOLS FOR USE WITH LANGGRAPH #####
"""
Bind tools to LLM and provide chat functionality.
Call:
/tools <query>
"""

def conditional_continue(self, state: MessagesState) -> Literal["tools", "__end__"]:
messages = state["messages"]
Expand All @@ -110,13 +104,13 @@ def conditional_continue(self, state: MessagesState) -> Literal["tools", "__end_
return "__end__"

# Get required tool files from ``.jupyter/jupyter-ai/tools/``
def getToolFiles(self, fpath):
if os.path.isfile(fpath):
file_paths = [fpath]
elif os.path.isdir(fpath):
def getToolFiles(self):
if os.path.isfile(self.tools_file_path):
file_paths = [self.tools_file_path]
elif os.path.isdir(self.tools_file_path):
file_paths = []
for filename in os.listdir(fpath):
file_paths.append(os.path.join(fpath, filename))
for filename in os.listdir(self.tools_file_path):
file_paths.append(os.path.join(self.tools_file_path, filename))
else:
self.reply("No tools found.")
return
Expand Down Expand Up @@ -147,15 +141,7 @@ def getToolNames(self, tools_file_path):
except FileNotFoundError as e: # to do
self.reply(f"Tools file not found at {tools_file_path}.")

def toolChat(self, query):
    """Stream a tool-augmented chat turn through the compiled LangGraph app
    and return the content of the final message it produced.

    Parameters
    ----------
    query : str
        The user's natural-language question, sent as a ``("human", ...)``
        message into the graph.

    Returns
    -------
    str | None
        Content of the last message emitted by the graph, or ``None`` if
        the stream yielded nothing.

    NOTE(review): assumes ``self.app`` is a compiled ``StateGraph``
    (``workflow.compile()``) — confirm against the builder in this class.
    """
    print("TOOL CHAT", query)  # debug trace of the incoming query
    response = None
    # stream_mode="values" yields the full accumulated message list after
    # each graph step; the last element is the newest message.
    for chunk in self.app.stream(
        {"messages": [("human", query)]}, stream_mode="values"
    ):
        last_message = chunk["messages"][-1]
        last_message.pretty_print()  # human-readable trace to stdout
        # BUG FIX: pretty_print() returns None, so the original
        # ``response = ...pretty_print()`` always returned None.
        # Capture the actual message content for the caller instead.
        response = last_message.content
    return response

##### MAIN FUNCTION #####
def useLLMwithTools(self, query):
"""
LangGraph documentation : https://langchain-ai.github.io/langgraph/tutorials/introduction/
Expand Down Expand Up @@ -187,7 +173,7 @@ def getTools(file_paths):
return tools

# Get tool file(s), then tools within tool files, and create tool node from tools
file_paths = self.getToolFiles(self.tools_file_path)
file_paths = self.getToolFiles()
tools = getTools(file_paths)
tool_node = ToolNode(tools)

Expand Down

0 comments on commit 789953b

Please sign in to comment.