From 67cfcf404367543c603eb0fd6c30bcab20dc9848 Mon Sep 17 00:00:00 2001
From: michael
Date: Fri, 13 Sep 2024 01:42:52 +0800
Subject: [PATCH] only replace prompt if context variable in template

---
 packages/jupyter-ai/jupyter_ai/chat_handlers/default.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
index bb0df7cc4..6b7d1afa2 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
@@ -105,7 +105,7 @@ async def process_message(self, message: HumanChatMessage):
         self.get_llm_chain()
         received_first_chunk = False
 
-        inputs = {"input": self.replace_prompt(message.body)}
+        inputs = {"input": message.body}
         if "context" in self.prompt_template.input_variables:
             # include context from context providers.
             try:
@@ -114,6 +114,7 @@
                 self.reply(str(e), message)
                 return
             inputs["context"] = context_prompt
+            inputs["input"] = self.replace_prompt(inputs["input"])
 
         # start with a pending message
         with self.pending("Generating response", message) as pending_message:
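
The gate above keys off the prompt template's declared input variables: after
this patch, replace_prompt() runs only when the template actually consumes a
"context" variable; otherwise the raw message.body is forwarded unchanged. A
minimal sketch of that condition with a LangChain-style PromptTemplate (the
import path and the template strings here are assumptions for illustration;
jupyter-ai constructs its own templates elsewhere):

    from langchain_core.prompts import PromptTemplate

    # Template strings are hypothetical; input_variables is inferred
    # from the placeholders in each template.
    with_context = PromptTemplate.from_template("{context}\n\nUser: {input}")
    without_context = PromptTemplate.from_template("User: {input}")

    # After this patch, replace_prompt(...) runs only in the first case.
    print("context" in with_context.input_variables)     # True
    print("context" in without_context.input_variables)  # False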