From 4a15e5c26141fedd9ef5f481fe53ea1372d8a2f2 Mon Sep 17 00:00:00 2001 From: ZhangShenao <15201440436@163.com> Date: Mon, 23 Dec 2024 14:26:14 +0800 Subject: [PATCH 1/3] Fix docs of ChatMLX --- docs/docs/integrations/chat/mlx.ipynb | 77 ++++++++++++++++++++++++--- 1 file changed, 69 insertions(+), 8 deletions(-) diff --git a/docs/docs/integrations/chat/mlx.ipynb b/docs/docs/integrations/chat/mlx.ipynb index a5945dffae408..79fe05ea8ea6d 100644 --- a/docs/docs/integrations/chat/mlx.ipynb +++ b/docs/docs/integrations/chat/mlx.ipynb @@ -16,9 +16,19 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[31mERROR: Cannot install mlx-lm==0.0.1, mlx-lm==0.0.10, mlx-lm==0.0.11, mlx-lm==0.0.12, mlx-lm==0.0.13, mlx-lm==0.0.14, mlx-lm==0.0.2, mlx-lm==0.0.3, mlx-lm==0.0.5, mlx-lm==0.0.6, mlx-lm==0.0.7, mlx-lm==0.0.8, mlx-lm==0.0.9, mlx-lm==0.1.0, mlx-lm==0.10.0, mlx-lm==0.11.0, mlx-lm==0.12.0, mlx-lm==0.12.1, mlx-lm==0.13.0, mlx-lm==0.13.1, mlx-lm==0.14.0, mlx-lm==0.14.1, mlx-lm==0.14.2, mlx-lm==0.14.3, mlx-lm==0.15.0, mlx-lm==0.15.1, mlx-lm==0.15.2, mlx-lm==0.15.3, mlx-lm==0.16.0, mlx-lm==0.16.1, mlx-lm==0.17.0, mlx-lm==0.17.1, mlx-lm==0.18.1, mlx-lm==0.18.2, mlx-lm==0.19.0, mlx-lm==0.19.1, mlx-lm==0.19.2, mlx-lm==0.19.3, mlx-lm==0.2.0, mlx-lm==0.20.1, mlx-lm==0.20.2, mlx-lm==0.20.3, mlx-lm==0.20.4, mlx-lm==0.3.0, mlx-lm==0.4.0, mlx-lm==0.5.0, mlx-lm==0.6.0, mlx-lm==0.7.0, mlx-lm==0.8.0 and mlx-lm==0.9.0 because these package versions have conflicting dependencies.\u001b[0m\u001b[31m\n", + "\u001b[0m\u001b[31mERROR: ResolutionImpossible: for help visit https://pip.pypa.io/en/latest/topics/dependency-resolution/#dealing-with-dependency-conflicts\u001b[0m\u001b[31m\n", + "\u001b[0mNote: you may need to restart the kernel to use updated packages.\n" + ] + } + ], "source": [ "%pip install --upgrade --quiet mlx-lm transformers 
huggingface_hub" ] @@ -34,9 +44,21 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, - "outputs": [], + "outputs": [ + { + "ename": "ModuleNotFoundError", + "evalue": "No module named 'langchain_community'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[4], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mlangchain_community\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mllms\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mmlx_pipeline\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m MLXPipeline\n\u001b[1;32m 3\u001b[0m llm \u001b[38;5;241m=\u001b[39m MLXPipeline\u001b[38;5;241m.\u001b[39mfrom_model_id(\n\u001b[1;32m 4\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmlx-community/quantized-gemma-2b-it\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 5\u001b[0m pipeline_kwargs\u001b[38;5;241m=\u001b[39m{\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmax_tokens\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;241m10\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtemp\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;241m0.1\u001b[39m},\n\u001b[1;32m 6\u001b[0m )\n", + "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'langchain_community'" + ] + } + ], "source": [ "from langchain_community.llms.mlx_pipeline import MLXPipeline\n", "\n", @@ -154,9 +176,48 @@ "# setup tools\n", "tools = load_tools([\"serpapi\", \"llm-math\"], llm=llm)\n", "\n", - "# setup ReAct style prompt\n", - "prompt = hub.pull(\"hwchase17/react-json\")\n", - "prompt = prompt.partial(\n", + "# setup ReAct style prompt,and remove system role\n", + "human_prompt = \"\"\"\n", + "Answer the following questions as best you can. 
You have access to the following tools:\n", + "\n", + "{tools}\n", + "\n", + "The way you use the tools is by specifying a json blob.\n", + "Specifically, this json should have a `action` key (with the name of the tool to use) and a `action_input` key (with the input to the tool going here).\n", + "\n", + "The only values that should be in the \"action\" field are: {tool_names}\n", + "\n", + "The $JSON_BLOB should only contain a SINGLE action, do NOT return a list of multiple actions. Here is an example of a valid $JSON_BLOB:\n", + "\n", + "```\n", + "{{\n", + " \"action\": $TOOL_NAME,\n", + " \"action_input\": $INPUT\n", + "}}\n", + "```\n", + "\n", + "ALWAYS use the following format:\n", + "\n", + "Question: the input question you must answer\n", + "Thought: you should always think about what to do\n", + "Action:\n", + "```\n", + "$JSON_BLOB\n", + "```\n", + "Observation: the result of the action\n", + "... (this Thought/Action/Observation can repeat N times)\n", + "Thought: I now know the final answer\n", + "Final Answer: the final answer to the original input question\n", + "\n", + "Begin! 
Reminder to always use the exact characters `Final Answer` when responding.\n", + "\n", + "{input}\n", + "\n", + "{agent_scratchpad}\n", + "\n", + "\"\"\"\n", + "\n", + "prompt = human_prompt.partial(\n", " tools=render_text_description(tools),\n", " tool_names=\", \".join([t.name for t in tools]),\n", ")\n", @@ -207,7 +268,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.18" + "version": "3.12.7" } }, "nbformat": 4, From c8778676b6cbfd3fb04196609dc6304792fa0d3a Mon Sep 17 00:00:00 2001 From: ZhangShenao <15201440436@163.com> Date: Mon, 23 Dec 2024 14:39:13 +0800 Subject: [PATCH 2/3] Fix docs of ChatMLX --- docs/docs/integrations/chat/mlx.ipynb | 30 ++++----------------------- 1 file changed, 4 insertions(+), 26 deletions(-) diff --git a/docs/docs/integrations/chat/mlx.ipynb b/docs/docs/integrations/chat/mlx.ipynb index 79fe05ea8ea6d..7f803716149c2 100644 --- a/docs/docs/integrations/chat/mlx.ipynb +++ b/docs/docs/integrations/chat/mlx.ipynb @@ -16,19 +16,9 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[31mERROR: Cannot install mlx-lm==0.0.1, mlx-lm==0.0.10, mlx-lm==0.0.11, mlx-lm==0.0.12, mlx-lm==0.0.13, mlx-lm==0.0.14, mlx-lm==0.0.2, mlx-lm==0.0.3, mlx-lm==0.0.5, mlx-lm==0.0.6, mlx-lm==0.0.7, mlx-lm==0.0.8, mlx-lm==0.0.9, mlx-lm==0.1.0, mlx-lm==0.10.0, mlx-lm==0.11.0, mlx-lm==0.12.0, mlx-lm==0.12.1, mlx-lm==0.13.0, mlx-lm==0.13.1, mlx-lm==0.14.0, mlx-lm==0.14.1, mlx-lm==0.14.2, mlx-lm==0.14.3, mlx-lm==0.15.0, mlx-lm==0.15.1, mlx-lm==0.15.2, mlx-lm==0.15.3, mlx-lm==0.16.0, mlx-lm==0.16.1, mlx-lm==0.17.0, mlx-lm==0.17.1, mlx-lm==0.18.1, mlx-lm==0.18.2, mlx-lm==0.19.0, mlx-lm==0.19.1, mlx-lm==0.19.2, mlx-lm==0.19.3, mlx-lm==0.2.0, mlx-lm==0.20.1, mlx-lm==0.20.2, mlx-lm==0.20.3, mlx-lm==0.20.4, mlx-lm==0.3.0, mlx-lm==0.4.0, mlx-lm==0.5.0, mlx-lm==0.6.0, mlx-lm==0.7.0, 
mlx-lm==0.8.0 and mlx-lm==0.9.0 because these package versions have conflicting dependencies.\u001b[0m\u001b[31m\n", - "\u001b[0m\u001b[31mERROR: ResolutionImpossible: for help visit https://pip.pypa.io/en/latest/topics/dependency-resolution/#dealing-with-dependency-conflicts\u001b[0m\u001b[31m\n", - "\u001b[0mNote: you may need to restart the kernel to use updated packages.\n" - ] - } - ], + "outputs": [], "source": [ "%pip install --upgrade --quiet mlx-lm transformers huggingface_hub" ] @@ -44,21 +34,9 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "ename": "ModuleNotFoundError", - "evalue": "No module named 'langchain_community'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[4], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mlangchain_community\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mllms\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mmlx_pipeline\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m MLXPipeline\n\u001b[1;32m 3\u001b[0m llm \u001b[38;5;241m=\u001b[39m MLXPipeline\u001b[38;5;241m.\u001b[39mfrom_model_id(\n\u001b[1;32m 4\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmlx-community/quantized-gemma-2b-it\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 5\u001b[0m pipeline_kwargs\u001b[38;5;241m=\u001b[39m{\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmax_tokens\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;241m10\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtemp\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;241m0.1\u001b[39m},\n\u001b[1;32m 6\u001b[0m )\n", - "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'langchain_community'" - ] - } - ], + "outputs": [], 
"source": [
    "from langchain_community.llms.mlx_pipeline import MLXPipeline\n",
    "\n",

From b9e6765f5d918731d30a3f6b24e431242fac8811 Mon Sep 17 00:00:00 2001
From: ZhangShenao <15201440436@163.com>
Date: Mon, 23 Dec 2024 14:48:55 +0800
Subject: [PATCH 3/3] Fix docs of ChatMLX

---
 docs/docs/integrations/chat/mlx.ipynb | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docs/docs/integrations/chat/mlx.ipynb b/docs/docs/integrations/chat/mlx.ipynb
index 7f803716149c2..dc852da549d55 100644
--- a/docs/docs/integrations/chat/mlx.ipynb
+++ b/docs/docs/integrations/chat/mlx.ipynb
@@ -154,7 +154,8 @@
     "# setup tools\n",
     "tools = load_tools([\"serpapi\", \"llm-math\"], llm=llm)\n",
     "\n",
-    "# setup ReAct style prompt,and remove system role\n",
+    "# setup ReAct style prompt\n",
+    "# Based on the 'hwchase17/react' prompt, modified because MLX does not support the `System` role\n",
     "human_prompt = \"\"\"\n",
     "Answer the following questions as best you can. You have access to the following tools:\n",
     "\n",