Skip to content

Commit

Permalink
Add /fix slash command (jupyterlab#828)
Browse files Browse the repository at this point in the history
* remove unused clear_memory() method in default.py

* implement error output signaling and retrieval in ActiveCellContext

* fixup

* implement /fix slash command

* pre-commit

* add docstring

* edit reply message when no active cell with error output exists

* add user documentation and screenshots for /fix
  • Loading branch information
dlqqq authored Jun 12, 2024
1 parent e214ee7 commit 289d39d
Show file tree
Hide file tree
Showing 18 changed files with 481 additions and 80 deletions.
Binary file added docs/source/_static/fix-error-cell-selected.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added docs/source/_static/fix-response.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
30 changes: 30 additions & 0 deletions docs/source/users/index.md
Original file line number Diff line number Diff line change
Expand Up @@ -497,6 +497,36 @@ The `/learn` command also provides downloading and processing papers from the [a
Use the `/export` command to export the chat history from the current session to a markdown file named `chat_history-YYYY-MM-DD-HH-mm.md`. Using `/export <file_name>` will export the chat history to `<file_name>-YYYY-MM-DD-HH-mm.md` instead. You can export chat history as many times as you like in a single session. Each successive export will include the entire chat history up to that point in the session.


### Fixing a code cell with an error

The `/fix` command can be used to fix any code cell with error output in a
Jupyter notebook file. To start, type `/fix` into the chat input. Jupyter AI
will then prompt you to select a cell with error output before sending the
request.

<img src="../_static/fix-no-error-cell-selected.png"
alt='Screenshot of the chat input containing `/fix` without a code cell with error output selected.'
class="screenshot" />

Then click on a code cell with error output. A blue bar should appear
immediately to the left of the code cell.

<img src="../_static/fix-error-cell-selected.png"
alt='Screenshot of a code cell with error output selected.'
class="screenshot" />

After this, the Send button to the right of the chat input will be enabled, and
you can use your mouse or keyboard to send `/fix` to Jupyternaut. The code cell
and its associated error output are included in the message automatically. When
complete, Jupyternaut will reply with suggested code that should fix the error.
You can use the action toolbar under each code block to quickly replace the
contents of the failing cell.

<img src="../_static/fix-response.png"
alt='Screenshot of a response from `/fix`, with the "Replace active cell" action hovered.'
class="screenshot" style="max-width:65%" />


### Additional chat commands

To clear the chat panel, use the `/clear` command. This does not reset the AI model; the model may still remember previous messages that you sent it, and it may use them to inform its responses.
Expand Down
1 change: 1 addition & 0 deletions packages/jupyter-ai/jupyter_ai/chat_handlers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from .clear import ClearChatHandler
from .default import DefaultChatHandler
from .export import ExportChatHandler
from .fix import FixChatHandler
from .generate import GenerateChatHandler
from .help import HelpChatHandler
from .learn import LearnChatHandler
13 changes: 0 additions & 13 deletions packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,19 +43,6 @@ def create_llm_chain(
llm=llm, prompt=prompt_template, verbose=True, memory=self.memory
)

def clear_memory(self):
    """Reset the conversation memory and the transcript for all clients."""
    # Wipe the LLM chain's conversation memory, if one has been created.
    memory = self.memory
    if memory:
        memory.clear()

    # Broadcast a ClearMessage so already-connected clients drop their
    # transcripts.
    self.reply(ClearMessage())

    # Drop the stored history so newly-connecting clients start from an
    # empty transcript as well.
    history = self._chat_history
    if history:
        history.clear()

async def process_message(self, message: HumanChatMessage):
self.get_llm_chain()
response = await self.llm_chain.apredict(input=message.body, stop=["\nHuman:"])
Expand Down
103 changes: 103 additions & 0 deletions packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
from typing import Dict, Type

from jupyter_ai.models import CellWithErrorSelection, HumanChatMessage
from jupyter_ai_magics.providers import BaseProvider
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate

from .base import BaseChatHandler, SlashCommandRoutingType

# Prompt sent to the LLM for `/fix`. The placeholders are filled from the
# user's message and the selected cell's error output in
# `FixChatHandler.process_message`.
FIX_STRING_TEMPLATE = """
You are Jupyternaut, a conversational assistant living in JupyterLab. Please fix
the notebook cell described below.
Additional instructions:
{extra_instructions}
Input cell:
```
{cell_content}
```
Output error:
```
{traceback}
{error_name}: {error_value}
```
""".strip()

# LangChain prompt template wrapping the string above; the input variables
# must match the placeholder names in `FIX_STRING_TEMPLATE` exactly.
FIX_PROMPT_TEMPLATE = PromptTemplate(
    input_variables=[
        "extra_instructions",
        "cell_content",
        "traceback",
        "error_name",
        "error_value",
    ],
    template=FIX_STRING_TEMPLATE,
)


class FixChatHandler(BaseChatHandler):
    """
    Accepts a `HumanChatMessage` that includes a cell with error output and
    recommends a fix as a reply. If a cell with error output is not included,
    this chat handler does nothing.
    `/fix` also accepts additional instructions in natural language as an
    arbitrary number of arguments, e.g.
    ```
    /fix use the numpy library to implement this function instead.
    ```
    """

    id = "fix"
    name = "Fix error cell"
    help = "Fix an error cell selected in your notebook"
    routing_type = SlashCommandRoutingType(slash_id="fix")
    uses_llm = True

    # NOTE: the redundant `__init__` that only forwarded *args/**kwargs to
    # `super().__init__` has been removed; the inherited constructor is used.

    def create_llm_chain(
        self, provider: Type[BaseProvider], provider_params: Dict[str, str]
    ):
        """Build the LLMChain used to answer `/fix` requests."""
        # User-configured model parameters extend/override the provider params.
        unified_parameters = {
            **provider_params,
            **(self.get_model_parameters(provider, provider_params)),
        }
        llm = provider(**unified_parameters)

        self.llm = llm
        self.llm_chain = LLMChain(llm=llm, prompt=FIX_PROMPT_TEMPLATE, verbose=True)

    async def process_message(self, message: HumanChatMessage):
        """Reply with suggested code fixing the selected error cell.

        Replies with an explanatory message and returns early when the message
        carries no `cell-with-error` selection.
        """
        if not (message.selection and message.selection.type == "cell-with-error"):
            self.reply(
                "`/fix` requires an active code cell with error output. Please click on a cell with error output and retry.",
                message,
            )
            return

        # hint type of selection
        selection: CellWithErrorSelection = message.selection

        # parse additional instructions specified after `/fix`; the slice
        # length is derived from the command string instead of a magic number
        extra_instructions = message.body[len("/fix") :].strip() or "None."

        self.get_llm_chain()
        response = await self.llm_chain.apredict(
            extra_instructions=extra_instructions,
            stop=["\nHuman:"],
            cell_content=selection.source,
            error_name=selection.error.name,
            error_value=selection.error.value,
            traceback="\n".join(selection.error.traceback),
        )
        self.reply(response, message)
5 changes: 5 additions & 0 deletions packages/jupyter-ai/jupyter_ai/extension.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
ClearChatHandler,
DefaultChatHandler,
ExportChatHandler,
FixChatHandler,
GenerateChatHandler,
HelpChatHandler,
LearnChatHandler,
Expand Down Expand Up @@ -264,13 +265,17 @@ def initialize_settings(self):
ask_chat_handler = AskChatHandler(**chat_handler_kwargs, retriever=retriever)

export_chat_handler = ExportChatHandler(**chat_handler_kwargs)

fix_chat_handler = FixChatHandler(**chat_handler_kwargs)

jai_chat_handlers = {
"default": default_chat_handler,
"/ask": ask_chat_handler,
"/clear": clear_chat_handler,
"/generate": generate_chat_handler,
"/learn": learn_chat_handler,
"/export": export_chat_handler,
"/fix": fix_chat_handler,
}

help_chat_handler = HelpChatHandler(
Expand Down
1 change: 1 addition & 0 deletions packages/jupyter-ai/jupyter_ai/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -207,6 +207,7 @@ async def on_message(self, message):
id=chat_message_id,
time=time.time(),
body=chat_request.prompt,
selection=chat_request.selection,
client=self.chat_client,
)

Expand Down
21 changes: 21 additions & 0 deletions packages/jupyter-ai/jupyter_ai/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,29 @@
DEFAULT_CHUNK_OVERLAP = 100


class CellError(BaseModel):
    """Error output attached to a notebook code cell."""

    # exception class name, e.g. "ZeroDivisionError"
    name: str
    # exception message
    value: str
    # traceback rendered as a list of lines
    traceback: List[str]


class CellWithErrorSelection(BaseModel):
    """A user-selected notebook cell together with its error output."""

    # discriminator identifying the selection variant
    type: Literal["cell-with-error"] = "cell-with-error"
    # source code of the selected cell
    source: str
    # the cell's error output
    error: CellError


Selection = Union[CellWithErrorSelection]


# the type of message used to chat with the agent
class ChatRequest(BaseModel):
    # raw chat input, including any leading slash command
    prompt: str
    # TODO: This currently is only used when a user runs the /fix slash command.
    # In the future, the frontend should set the text selection on this field in
    # the `HumanChatMessage` it sends to JAI, instead of appending the text
    # selection to `body` in the frontend.
    selection: Optional[Selection]


class ChatUser(BaseModel):
Expand Down Expand Up @@ -55,6 +75,7 @@ class HumanChatMessage(BaseModel):
time: float
body: str
client: ChatClient
selection: Optional[Selection]


class ConnectionMessage(BaseModel):
Expand Down
69 changes: 49 additions & 20 deletions packages/jupyter-ai/src/components/chat-input.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -9,27 +9,28 @@ import {
FormGroup,
FormControlLabel,
Checkbox,
IconButton,
InputAdornment,
Typography
} from '@mui/material';
import SendIcon from '@mui/icons-material/Send';
import {
Download,
FindInPage,
Help,
MoreHoriz,
MenuBook,
School,
HideSource
HideSource,
AutoFixNormal
} from '@mui/icons-material';

import { AiService } from '../handler';
import { SendButton, SendButtonProps } from './chat-input/send-button';
import { useActiveCellContext } from '../contexts/active-cell-context';

type ChatInputProps = {
value: string;
onChange: (newValue: string) => unknown;
onSend: () => unknown;
onSend: (selection?: AiService.Selection) => unknown;
hasSelection: boolean;
includeSelection: boolean;
toggleIncludeSelection: () => unknown;
Expand All @@ -56,6 +57,7 @@ const DEFAULT_SLASH_COMMAND_ICONS: Record<string, JSX.Element> = {
ask: <FindInPage />,
clear: <HideSource />,
export: <Download />,
fix: <AutoFixNormal />,
generate: <MenuBook />,
help: <Help />,
learn: <School />,
Expand Down Expand Up @@ -101,6 +103,8 @@ export function ChatInput(props: ChatInputProps): JSX.Element {
const [slashCommandOptions, setSlashCommandOptions] = useState<
SlashCommandOption[]
>([]);
const [currSlashCommand, setCurrSlashCommand] = useState<string | null>(null);
const activeCell = useActiveCellContext();

/**
* Effect: fetch the list of available slash commands from the backend on
Expand Down Expand Up @@ -129,8 +133,7 @@ export function ChatInput(props: ChatInputProps): JSX.Element {

/**
* Effect: Open the autocomplete when the user types a slash into an empty
* chat input. Close the autocomplete and reset the last selected value when
* the user clears the chat input.
* chat input. Close the autocomplete when the user clears the chat input.
*/
useEffect(() => {
if (props.value === '/') {
Expand All @@ -144,6 +147,35 @@ export function ChatInput(props: ChatInputProps): JSX.Element {
}
}, [props.value]);

/**
 * Effect: Set current slash command. The command is captured in a group so
 * that leading whitespace matched by `\s*` is excluded — otherwise input like
 * `"  /fix"` would store `"  /fix"` and fail the exact `=== '/fix'` check in
 * `onSend`.
 */
useEffect(() => {
  const matchedSlashCommand = props.value.match(/^\s*(\/\w+)/);
  setCurrSlashCommand(matchedSlashCommand && matchedSlashCommand[1]);
}, [props.value]);

// TODO: unify the `onSend` implementation in `chat.tsx` and here once text
// selection is refactored.
/**
 * Sends the current chat input. For `/fix`, the active cell and its error
 * output are attached as a `cell-with-error` selection; sending is aborted
 * when no such cell is available.
 */
function onSend() {
  // case: /fix
  if (currSlashCommand === '/fix') {
    // NOTE(review): `getContent(true)` appears to return the active cell's
    // content with error output, or a falsy value when the active cell has
    // no error output — confirm against ActiveCellManager.
    const cellWithError = activeCell.manager.getContent(true);
    if (!cellWithError) {
      // silently do nothing; the send button is expected to be disabled in
      // this state
      return;
    }

    props.onSend({
      ...cellWithError,
      type: 'cell-with-error'
    });
    return;
  }

  // default case
  props.onSend();
}

function handleKeyDown(event: React.KeyboardEvent<HTMLDivElement>) {
if (event.key !== 'Enter') {
return;
Expand All @@ -160,7 +192,7 @@ export function ChatInput(props: ChatInputProps): JSX.Element {
((props.sendWithShiftEnter && event.shiftKey) ||
(!props.sendWithShiftEnter && !event.shiftKey))
) {
props.onSend();
onSend();
event.stopPropagation();
event.preventDefault();
}
Expand All @@ -177,6 +209,15 @@ export function ChatInput(props: ChatInputProps): JSX.Element {
</span>
);

const inputExists = !!props.value.trim();
const sendButtonProps: SendButtonProps = {
onSend,
sendWithShiftEnter: props.sendWithShiftEnter,
inputExists,
activeCellHasError: activeCell.hasError,
currSlashCommand
};

return (
<Box sx={props.sx}>
<Autocomplete
Expand Down Expand Up @@ -246,19 +287,7 @@ export function ChatInput(props: ChatInputProps): JSX.Element {
...params.InputProps,
endAdornment: (
<InputAdornment position="end">
<IconButton
size="small"
color="primary"
onClick={props.onSend}
disabled={!props.value.trim().length}
title={
props.sendWithShiftEnter
? 'Send message (SHIFT+ENTER)'
: 'Send message (ENTER)'
}
>
<SendIcon />
</IconButton>
<SendButton {...sendButtonProps} />
</InputAdornment>
)
}}
Expand Down
Loading

0 comments on commit 289d39d

Please sign in to comment.