From 5aecb24dd0ce174d77a0c8ab8fa0cdd5e01c6669 Mon Sep 17 00:00:00 2001
From: michael
Date: Fri, 13 Sep 2024 01:42:52 +0800
Subject: [PATCH] only replace prompt if context variable in template

---
 packages/jupyter-ai/jupyter_ai/chat_handlers/default.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
index e3aa45725..1ae1f034c 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
@@ -105,7 +105,7 @@ async def process_message(self, message: HumanChatMessage):
         self.get_llm_chain()
         received_first_chunk = False
 
-        inputs = {"input": self.replace_prompt(message.body)}
+        inputs = {"input": message.body}
         if "context" in self.prompt_template.input_variables:
             # include context from context providers.
             try:
@@ -114,6 +114,7 @@
                 self.reply(str(e), message)
                 return
             inputs["context"] = context_prompt
+            inputs["input"] = self.replace_prompt(inputs["input"])
 
         # start with a pending message
         with self.pending("Generating response", message) as pending_message:
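
For reference, a minimal sketch of the patched control flow in
DefaultChatHandler.process_message, assuming the helpers visible in the
diff context (make_context_prompt, replace_prompt,
ContextProviderException); this is an excerpt, not a complete handler:

    # Excerpt of process_message after this patch. Helper names are taken
    # from the diff's surrounding context.
    async def process_message(self, message: HumanChatMessage):
        self.get_llm_chain()
        received_first_chunk = False

        # By default the raw message body is passed through unmodified.
        inputs = {"input": message.body}
        if "context" in self.prompt_template.input_variables:
            # Only templates that declare a "context" input variable
            # consult the context providers.
            try:
                context_prompt = await self.make_context_prompt(message)
            except ContextProviderException as e:
                self.reply(str(e), message)
                return
            inputs["context"] = context_prompt
            # replace_prompt() now runs only on this branch, so prompt
            # rewriting is skipped for templates without a context slot.
            inputs["input"] = self.replace_prompt(inputs["input"])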