From 3575b16df21d5c9016def9ca00609cf7d487791d Mon Sep 17 00:00:00 2001 From: Daniel Glogowski Date: Wed, 24 Jul 2024 21:21:00 -0700 Subject: [PATCH] chat nb update with tool calling and upstream changes --- .../docs/chat/nvidia_ai_endpoints.ipynb | 61 +++++++++++-------- 1 file changed, 35 insertions(+), 26 deletions(-) diff --git a/libs/ai-endpoints/docs/chat/nvidia_ai_endpoints.ipynb b/libs/ai-endpoints/docs/chat/nvidia_ai_endpoints.ipynb index e1025759..3009be7e 100644 --- a/libs/ai-endpoints/docs/chat/nvidia_ai_endpoints.ipynb +++ b/libs/ai-endpoints/docs/chat/nvidia_ai_endpoints.ipynb @@ -451,7 +451,7 @@ "id": "137662a6" }, "source": [ - "## Example usage within a Conversation Chains" + "## Example usage within RunnableWithMessageHistory" ] }, { @@ -461,7 +461,7 @@ "id": "79efa62d" }, "source": [ - "Like any other integration, ChatNVIDIA is fine to support chat utilities like conversation buffers by default. Below, we show the [LangChain ConversationBufferMemory](https://python.langchain.com/docs/modules/memory/types/buffer) example applied to the `mistralai/mixtral-8x22b-instruct-v0.1` model." + "Like any other integration, ChatNVIDIA supports chat utilities such as RunnableWithMessageHistory, which is analogous to using `ConversationChain`. Below, we show the [LangChain RunnableWithMessageHistory](https://api.python.langchain.com/en/latest/runnables/langchain_core.runnables.history.RunnableWithMessageHistory.html) example applied to the `mistralai/mixtral-8x22b-instruct-v0.1` model." 
] }, { @@ -483,8 +483,19 @@ }, "outputs": [], "source": [ - "from langchain.chains import ConversationChain\n", - "from langchain.memory import ConversationBufferMemory\n", + "from langchain_core.chat_history import InMemoryChatMessageHistory\n", + "from langchain_core.runnables.history import RunnableWithMessageHistory\n", + "\n", + "# store is a dictionary that maps session IDs to their corresponding chat histories.\n", + "store = {} # memory is maintained outside the chain\n", + "\n", + "\n", + "# A function that returns the chat history for a given session ID.\n", + "def get_session_history(session_id: str) -> InMemoryChatMessageHistory:\n", + " if session_id not in store:\n", + " store[session_id] = InMemoryChatMessageHistory()\n", + " return store[session_id]\n", + "\n", "\n", "chat = ChatNVIDIA(\n", " model=\"mistralai/mixtral-8x22b-instruct-v0.1\",\n", @@ -493,24 +504,18 @@ " top_p=1.0,\n", ")\n", "\n", - "conversation = ConversationChain(llm=chat, memory=ConversationBufferMemory())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f644ff28", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/", - "height": 268 - }, - "id": "f644ff28", - "outputId": "bae354cc-2118-4e01-ce20-a717ac94d27d" - }, - "outputs": [], - "source": [ - "conversation.invoke(\"Hi there!\")[\"response\"]" + "# Define a RunnableConfig object, with a `configurable` key. session_id determines thread\n", + "config = {\"configurable\": {\"session_id\": \"1\"}}\n", + "\n", + "conversation = RunnableWithMessageHistory(\n", + " chat,\n", + " get_session_history,\n", + ")\n", + "\n", + "conversation.invoke(\n", + " \"Hi I'm Srijan Dubey.\", # input or query\n", + " config=config,\n", + ")" ] }, { @@ -527,9 +532,10 @@ }, "outputs": [], "source": [ - "conversation.invoke(\"I'm doing well! Just having a conversation with an AI.\")[\n", - " \"response\"\n", - "]" + "conversation.invoke(\n", + " \"I'm doing well! 
Just having a conversation with an AI.\",\n", + " config=config,\n", + ")" ] }, { @@ -546,7 +552,10 @@ }, "outputs": [], "source": [ - "conversation.invoke(\"Tell me about yourself.\")[\"response\"]" + "conversation.invoke(\n", + " \"Tell me about yourself.\",\n", + " config=config,\n", + ")" ] }, {