Skip to content

Commit

Permalink
Only replace the prompt if the context variable is present in the template
Browse files Browse the repository at this point in the history
  • Loading branch information
michaelchia committed Sep 12, 2024
1 parent 6b0a53d commit 5aecb24
Showing 1 changed file with 2 additions and 1 deletion.
3 changes: 2 additions & 1 deletion packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ async def process_message(self, message: HumanChatMessage):
self.get_llm_chain()
received_first_chunk = False

inputs = {"input": self.replace_prompt(message.body)}
inputs = {"input": message.body}
if "context" in self.prompt_template.input_variables:
# include context from context providers.
try:
Expand All @@ -114,6 +114,7 @@ async def process_message(self, message: HumanChatMessage):
self.reply(str(e), message)
return
inputs["context"] = context_prompt
inputs["input"] = self.replace_prompt(inputs["input"])

# start with a pending message
with self.pending("Generating response", message) as pending_message:
Expand Down

0 comments on commit 5aecb24

Please sign in to comment.