diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
index bb0df7cc4..6b7d1afa2 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
@@ -105,7 +105,7 @@ async def process_message(self, message: HumanChatMessage):
         self.get_llm_chain()
         received_first_chunk = False
 
-        inputs = {"input": self.replace_prompt(message.body)}
+        inputs = {"input": message.body}
         if "context" in self.prompt_template.input_variables:
             # include context from context providers.
             try:
@@ -114,6 +114,7 @@ async def process_message(self, message: HumanChatMessage):
                 self.reply(str(e), message)
                 return
             inputs["context"] = context_prompt
+            inputs["input"] = self.replace_prompt(inputs["input"])
 
         # start with a pending message
         with self.pending("Generating response", message) as pending_message:
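
The net effect of the two hunks: `replace_prompt` no longer runs unconditionally on `message.body`. The raw body is placed into `inputs` first, and prompt expansion happens only inside the context-provider branch, after `make_context_prompt` succeeds and `inputs["context"]` is set. A minimal standalone sketch of the new ordering follows; `build_inputs`, the stub bodies, and the `@file` token are hypothetical stand-ins, while the names `replace_prompt`, `make_context_prompt`, and the `input_variables` check come from the diff:

```python
# Sketch of the post-change control flow in process_message.
# The stub implementations below are illustrative, not jupyter-ai's.
import asyncio


def replace_prompt(text: str) -> str:
    # Stand-in for the handler's prompt-token expansion.
    return text.replace("@file", "<file contents>")


async def make_context_prompt(body: str) -> str:
    # Stand-in for gathering context from context providers.
    return f"Context for: {body}"


async def build_inputs(body: str, input_variables: list[str]) -> dict:
    # Raw body first (previously this was replace_prompt(body)).
    inputs = {"input": body}
    if "context" in input_variables:
        inputs["context"] = await make_context_prompt(body)
        # Expansion now happens only on this branch, after the
        # context prompt has been built.
        inputs["input"] = replace_prompt(inputs["input"])
    return inputs


if __name__ == "__main__":
    # Template with a context variable: body gets expanded.
    print(asyncio.run(build_inputs("explain @file", ["input", "context"])))
    # Template without one: body passes through untouched.
    print(asyncio.run(build_inputs("explain @file", ["input"])))
```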