Skip to content

Commit

Permalink
styling + pending message for /fix
Browse files Browse the repository at this point in the history
  • Loading branch information
michaelchia committed Jun 13, 2024
1 parent 40e38e6 commit 9a2bf31
Show file tree
Hide file tree
Showing 7 changed files with 49 additions and 27 deletions.
3 changes: 1 addition & 2 deletions packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
Follow Up Input: {question}
Standalone question:"""
CONDENSE_PROMPT = PromptTemplate.from_template(PROMPT_TEMPLATE)
PENDING_MESSAGE = "Searching learned documents"


class AskChatHandler(BaseChatHandler):
Expand Down Expand Up @@ -72,7 +71,7 @@ async def process_message(self, message: HumanChatMessage):
self.get_llm_chain()

try:
with self.pending(PENDING_MESSAGE):
with self.pending("Searching learned documents"):
result = await self.llm_chain.acall({"question": query})
response = result["answer"]
self.reply(response, message)
Expand Down
4 changes: 1 addition & 3 deletions packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@

from .base import BaseChatHandler, SlashCommandRoutingType

PENDING_MESSAGE = "Thinking"


class DefaultChatHandler(BaseChatHandler):
id = "default"
Expand Down Expand Up @@ -47,7 +45,7 @@ def create_llm_chain(

async def process_message(self, message: HumanChatMessage):
self.get_llm_chain()
with self.pending(PENDING_MESSAGE):
with self.pending("Thinking"):
response = await self.llm_chain.apredict(
input=message.body, stop=["\nHuman:"]
)
Expand Down
17 changes: 9 additions & 8 deletions packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,12 +92,13 @@ async def process_message(self, message: HumanChatMessage):
extra_instructions = message.body[4:].strip() or "None."

self.get_llm_chain()
response = await self.llm_chain.apredict(
extra_instructions=extra_instructions,
stop=["\nHuman:"],
cell_content=selection.source,
error_name=selection.error.name,
error_value=selection.error.value,
traceback="\n".join(selection.error.traceback),
)
with self.pending("Analyzing error"):
response = await self.llm_chain.apredict(
extra_instructions=extra_instructions,
stop=["\nHuman:"],
cell_content=selection.source,
error_name=selection.error.name,
error_value=selection.error.value,
traceback="\n".join(selection.error.traceback),
)
self.reply(response, message)
2 changes: 0 additions & 2 deletions packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,8 +151,6 @@ async def process_message(self, message: HumanChatMessage):
# delete and relearn index if embedding model was changed
await self.delete_and_relearn()

# if args.verbose:
# self.reply(f"Loading and splitting files for {load_path}", message)
with self.pending(f"Loading and splitting files for {load_path}"):
try:
await self.learn_dir(
Expand Down
37 changes: 25 additions & 12 deletions packages/jupyter-ai/src/components/pending-messages.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ import { Box } from '@mui/material';
import { AiService } from '../handler';
import { ChatMessageHeader } from './chat-messages';

const PENDING_MESSAGE_CLASS = 'jp-ai-pending-message';

type PendingMessagesProps = {
messages: AiService.PendingMessage[];
};
Expand All @@ -22,19 +24,26 @@ type PendingMessageGroup = {
};

function PendingMessageElement(props: PendingMessageElementProps): JSX.Element {
if (!props.ellipsis) {
return <span>{props.text}</span>;
}
const [dots, setDots] = useState('');
let text = props.text;
if (props.ellipsis) {
const [dots, setDots] = useState('');

useEffect(() => {
const interval = setInterval(() => {
setDots(dots => (dots.length < 3 ? dots + '.' : ''));
}, 500);
useEffect(() => {
const interval = setInterval(() => {
setDots(dots => (dots.length < 3 ? dots + '.' : ''));
}, 500);

return () => clearInterval(interval);
}, []);
return <span>{props.text + dots}</span>;
return () => clearInterval(interval);
}, []);
text = props.text + dots;
}
return (
<div>
{text.split('\n').map((line, index) => (
<p key={index}>{line}</p>
))}
</div>
);
}

export function PendingMessages(props: PendingMessagesProps): JSX.Element {
Expand Down Expand Up @@ -85,7 +94,11 @@ export function PendingMessages(props: PendingMessagesProps): JSX.Element {
sx={{ marginBottom: 3 }}
/>
{group.messages.map((message, j) => (
<Box key={j} sx={{ padding: 2 }}>
<Box
className={PENDING_MESSAGE_CLASS}
key={j}
sx={{ marginBottom: 1 }}
>
<PendingMessageElement
text={message.body}
ellipsis={message.ellipsis}
Expand Down
1 change: 1 addition & 0 deletions packages/jupyter-ai/style/base.css
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,4 @@
@import './expandable-text-field.css';
@import './chat-settings.css';
@import './rendermime-markdown.css';
@import './pending-message.css';
12 changes: 12 additions & 0 deletions packages/jupyter-ai/style/pending-message.css
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
/* Styles for the transient "pending" messages (e.g. "Thinking...",
   "Searching learned documents...") rendered by PendingMessages in
   components/pending-messages.tsx, which attaches the
   jp-ai-pending-message class to each message Box. */

/* Base appearance: use the theme's secondary font color so pending
   text reads as muted/status text rather than a real chat message. */
.jp-ai-pending-message {
padding-right: 0;
color: var(--jp-ui-font-color2);
}

/* Separate consecutive pending messages; the :not(:last-child) guard
   avoids trailing space after the final one. NOTE(review): the `div`
   qualifier raises specificity above the base rule — presumably
   intentional so this margin wins; confirm against the component markup. */
div.jp-ai-pending-message:not(:last-child) {
margin-bottom: 2em;
}

/* Tighten line spacing for the per-line <p> elements the component
   emits (message body is split on '\n' into paragraphs). A value
   below 1 compresses default paragraph spacing — assumes each <p>
   is a single short line; TODO confirm long lines don't overlap. */
.jp-ai-pending-message p {
line-height: 0.6;
}

0 comments on commit 9a2bf31

Please sign in to comment.