diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py
index 86606f3aa..79736f2cb 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py
@@ -16,7 +16,6 @@ Follow Up Input: {question}
 Standalone question:"""
 CONDENSE_PROMPT = PromptTemplate.from_template(PROMPT_TEMPLATE)
 
-PENDING_MESSAGE = "Searching learned documents"
 
 
 class AskChatHandler(BaseChatHandler):
@@ -72,7 +71,7 @@ async def process_message(self, message: HumanChatMessage):
         self.get_llm_chain()
 
         try:
-            with self.pending(PENDING_MESSAGE):
+            with self.pending("Searching learned documents"):
                 result = await self.llm_chain.acall({"question": query})
                 response = result["answer"]
             self.reply(response, message)
diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
index dddbf1417..79fd29051 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
@@ -7,8 +7,6 @@
 
 from .base import BaseChatHandler, SlashCommandRoutingType
 
-PENDING_MESSAGE = "Thinking"
-
 
 class DefaultChatHandler(BaseChatHandler):
     id = "default"
@@ -47,7 +45,7 @@ def create_llm_chain(
 
     async def process_message(self, message: HumanChatMessage):
         self.get_llm_chain()
-        with self.pending(PENDING_MESSAGE):
+        with self.pending("Thinking"):
             response = await self.llm_chain.apredict(
                 input=message.body, stop=["\nHuman:"]
             )
diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py
index 0f62e5681..f8c9f6f6b 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py
@@ -92,12 +92,13 @@ async def process_message(self, message: HumanChatMessage):
         extra_instructions = message.body[4:].strip() or "None."
 
         self.get_llm_chain()
-        response = await self.llm_chain.apredict(
-            extra_instructions=extra_instructions,
-            stop=["\nHuman:"],
-            cell_content=selection.source,
-            error_name=selection.error.name,
-            error_value=selection.error.value,
-            traceback="\n".join(selection.error.traceback),
-        )
+        with self.pending("Analyzing error"):
+            response = await self.llm_chain.apredict(
+                extra_instructions=extra_instructions,
+                stop=["\nHuman:"],
+                cell_content=selection.source,
+                error_name=selection.error.name,
+                error_value=selection.error.value,
+                traceback="\n".join(selection.error.traceback),
+            )
         self.reply(response, message)
diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py
index e7ebc7394..e8ca6bddc 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py
@@ -151,8 +151,6 @@ async def process_message(self, message: HumanChatMessage):
         # delete and relearn index if embedding model was changed
         await self.delete_and_relearn()
 
-        # if args.verbose:
-        #     self.reply(f"Loading and splitting files for {load_path}", message)
         with self.pending(f"Loading and splitting files for {load_path}"):
             try:
                 await self.learn_dir(
diff --git a/packages/jupyter-ai/src/components/pending-messages.tsx b/packages/jupyter-ai/src/components/pending-messages.tsx
index 3db82d040..f7527c7ed 100644
--- a/packages/jupyter-ai/src/components/pending-messages.tsx
+++ b/packages/jupyter-ai/src/components/pending-messages.tsx
@@ -4,6 +4,8 @@ import { Box } from '@mui/material';
 import { AiService } from '../handler';
 import { ChatMessageHeader } from './chat-messages';
 
+const PENDING_MESSAGE_CLASS = 'jp-ai-pending-message';
+
 type PendingMessagesProps = {
   messages: AiService.PendingMessage[];
 };
@@ -22,19 +24,26 @@ type PendingMessageGroup = {
 };
 
 function PendingMessageElement(props: PendingMessageElementProps): JSX.Element {
-  if (!props.ellipsis) {
-    return <span>{props.text}</span>;
-  }
-  const [dots, setDots] = useState('');
+  let text = props.text;
+  if (props.ellipsis) {
+    const [dots, setDots] = useState('');
 
-  useEffect(() => {
-    const interval = setInterval(() => {
-      setDots(dots => (dots.length < 3 ? dots + '.' : ''));
-    }, 500);
+    useEffect(() => {
+      const interval = setInterval(() => {
+        setDots(dots => (dots.length < 3 ? dots + '.' : ''));
+      }, 500);
 
-    return () => clearInterval(interval);
-  }, []);
-  return <span>{props.text + dots}</span>;
+      return () => clearInterval(interval);
+    }, []);
+    text = props.text + dots;
+  }
+  return (
+    <Box>
+      {text.split('\n').map((line, index) => (
+        <Box key={index}>{line}</Box>
+      ))}
+    </Box>
+  );
 }
 
 export function PendingMessages(props: PendingMessagesProps): JSX.Element {
@@ -85,7 +94,11 @@ export function PendingMessages(props: PendingMessagesProps): JSX.Element {
             sx={{ marginBottom: 3 }}
           />
          {group.messages.map((message, j) => (
-
+