Skip to content

Commit

Permalink
Merge branch 'preferred_dir' of github.com:andrewfulton9/jupyter-ai into preferred_dir
Browse files Browse the repository at this point in the history
  • Loading branch information
andrewfulton9 committed Jul 9, 2024
2 parents bd6b77e + 3e04d58 commit 77dda6c
Show file tree
Hide file tree
Showing 7 changed files with 102 additions and 20 deletions.
17 changes: 6 additions & 11 deletions packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -485,7 +485,7 @@ async def stream_inline_completions(
chain = self._create_completion_chain()
token = completion.token_from_request(request, 0)
model_arguments = completion.template_inputs_from_request(request)
suggestion = ""
suggestion = processed_suggestion = ""

# send an incomplete `InlineCompletionReply`, indicating to the
# client that LLM output is about to streamed across this connection.
Expand All @@ -505,25 +505,20 @@ async def stream_inline_completions(

async for fragment in chain.astream(input=model_arguments):
suggestion += fragment
if suggestion.startswith("```"):
if "\n" not in suggestion:
# we are not ready to apply post-processing
continue
else:
suggestion = completion.post_process_suggestion(suggestion, request)
elif suggestion.rstrip().endswith("```"):
suggestion = completion.post_process_suggestion(suggestion, request)
processed_suggestion = completion.post_process_suggestion(
suggestion, request
)
yield InlineCompletionStreamChunk(
type="stream",
response={"insertText": suggestion, "token": token},
response={"insertText": processed_suggestion, "token": token},
reply_to=request.number,
done=False,
)

# finally, send a message confirming that we are done
yield InlineCompletionStreamChunk(
type="stream",
response={"insertText": suggestion, "token": token},
response={"insertText": processed_suggestion, "token": token},
reply_to=request.number,
done=True,
)
Expand Down
39 changes: 34 additions & 5 deletions packages/jupyter-ai/jupyter_ai/tests/completions/test_handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,13 +98,16 @@ async def test_handle_request(inline_handler):
assert suggestions[0].insertText == "Test response"


# (raw LLM response, expected suggestion after post-processing) pairs.
# Each raw response wraps the completion in Markdown code fences that
# `post_process_suggestion` is expected to strip.
expected_suggestions_cases = [
    # fence with a language tag is removed along with its newlines
    ("```python\nTest python code\n```", "Test python code"),
    # trailing whitespace/newlines after the closing fence are removed too
    ("```\ntest\n```\n \n", "test"),
    # backticks *inside* the suggestion body must be preserved
    ("```hello```world```", "hello```world"),
]


@pytest.mark.parametrize(
"response,expected_suggestion",
[
("```python\nTest python code\n```", "Test python code"),
("```\ntest\n```\n \n", "test"),
("```hello```world```", "hello```world"),
],
expected_suggestions_cases,
)
async def test_handle_request_with_spurious_fragments(response, expected_suggestion):
inline_handler = MockCompletionHandler(
Expand All @@ -128,6 +131,32 @@ async def test_handle_request_with_spurious_fragments(response, expected_suggest
assert suggestions[0].insertText == expected_suggestion


@pytest.mark.parametrize(
    "response,expected_suggestion",
    expected_suggestions_cases,
)
async def test_handle_request_with_spurious_fragments_stream(
    response, expected_suggestion
):
    """Streamed completions must be free of code-fence fragments.

    Both the intermediate stream chunk and the final (done) message are
    checked against the post-processed suggestion.
    """
    provider_params = {
        "model_id": "model",
        "responses": [response],
    }
    inline_handler = MockCompletionHandler(
        lm_provider=MockProvider,
        lm_provider_params=provider_params,
    )
    dummy_request = InlineCompletionRequest(
        number=1, prefix="", suffix="", mime="", stream=True
    )

    await inline_handler.handle_stream_request(dummy_request)

    # expected: one "open" reply, one stream chunk, one final (done) chunk
    assert len(inline_handler.messages) == 3

    streamed_chunk = inline_handler.messages[1]
    final_chunk = inline_handler.messages[2]
    # the streamed fragment should not include spurious fragments
    assert streamed_chunk.response.insertText == expected_suggestion
    # the final state should not include spurious fragments either
    assert final_chunk.response.insertText == expected_suggestion


async def test_handle_stream_request():
inline_handler = MockCompletionHandler(
lm_provider=MockProvider,
Expand Down
8 changes: 8 additions & 0 deletions packages/jupyter-ai/schema/plugin.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,14 @@
"description": "JupyterLab generative artificial intelligence integration.",
"jupyter.lab.setting-icon": "jupyter-ai::chat",
"jupyter.lab.setting-icon-label": "Jupyter AI Chat",
"jupyter.lab.shortcuts": [
{
"command": "jupyter-ai:focus-chat-input",
"keys": ["Accel Shift 1"],
"selector": "body",
"preventDefault": false
}
],
"additionalProperties": false,
"type": "object"
}
23 changes: 22 additions & 1 deletion packages/jupyter-ai/src/components/chat-input.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import React, { useEffect, useState } from 'react';
import React, { useEffect, useRef, useState } from 'react';

import {
Autocomplete,
Expand All @@ -22,6 +22,7 @@ import {
HideSource,
AutoFixNormal
} from '@mui/icons-material';
import { ISignal } from '@lumino/signaling';

import { AiService } from '../handler';
import { SendButton, SendButtonProps } from './chat-input/send-button';
Expand All @@ -33,6 +34,7 @@ type ChatInputProps = {
onSend: (selection?: AiService.Selection) => unknown;
hasSelection: boolean;
includeSelection: boolean;
focusInputSignal: ISignal<unknown, void>;
toggleIncludeSelection: () => unknown;
replaceSelection: boolean;
toggleReplaceSelection: () => unknown;
Expand Down Expand Up @@ -131,6 +133,24 @@ export function ChatInput(props: ChatInputProps): JSX.Element {
// controls whether the slash command autocomplete is open
const [open, setOpen] = useState<boolean>(false);

// store reference to the input element to enable focusing it easily
const inputRef = useRef<HTMLInputElement>();

/**
* Effect: connect the signal emitted on input focus request.
*/
useEffect(() => {
  const focusInputElement = () => {
    // focus the underlying <input>/<textarea> when the signal fires
    if (inputRef.current) {
      inputRef.current.focus();
    }
  };
  props.focusInputSignal.connect(focusInputElement);
  return () => {
    // avoid leaking the slot when the component unmounts
    props.focusInputSignal.disconnect(focusInputElement);
  };
  // include the signal in the deps: with `[]`, a replaced signal prop would
  // keep the stale connection and never receive new focus requests.
}, [props.focusInputSignal]);

/**
* Effect: Open the autocomplete when the user types a slash into an empty
* chat input. Close the autocomplete when the user clears the chat input.
Expand Down Expand Up @@ -284,6 +304,7 @@ export function ChatInput(props: ChatInputProps): JSX.Element {
multiline
placeholder="Ask Jupyternaut"
onKeyDown={handleKeyDown}
inputRef={inputRef}
InputProps={{
...params.InputProps,
endAdornment: (
Expand Down
6 changes: 6 additions & 0 deletions packages/jupyter-ai/src/components/chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import ArrowBackIcon from '@mui/icons-material/ArrowBack';
import type { Awareness } from 'y-protocols/awareness';
import type { IThemeManager } from '@jupyterlab/apputils';
import { IRenderMimeRegistry } from '@jupyterlab/rendermime';
import { ISignal } from '@lumino/signaling';

import { JlThemeProvider } from './jl-theme-provider';
import { ChatMessages } from './chat-messages';
Expand All @@ -31,10 +32,12 @@ type ChatBodyProps = {
chatHandler: ChatHandler;
setChatView: (view: ChatView) => void;
rmRegistry: IRenderMimeRegistry;
focusInputSignal: ISignal<unknown, void>;
};

function ChatBody({
chatHandler,
focusInputSignal,
setChatView: chatViewHandler,
rmRegistry: renderMimeRegistry
}: ChatBodyProps): JSX.Element {
Expand Down Expand Up @@ -162,6 +165,7 @@ function ChatBody({
onSend={onSend}
hasSelection={!!textSelection?.text}
includeSelection={includeSelection}
focusInputSignal={focusInputSignal}
toggleIncludeSelection={() =>
setIncludeSelection(includeSelection => !includeSelection)
}
Expand Down Expand Up @@ -192,6 +196,7 @@ export type ChatProps = {
completionProvider: IJaiCompletionProvider | null;
openInlineCompleterSettings: () => void;
activeCellManager: ActiveCellManager;
focusInputSignal: ISignal<unknown, void>;
};

enum ChatView {
Expand Down Expand Up @@ -244,6 +249,7 @@ export function Chat(props: ChatProps): JSX.Element {
chatHandler={props.chatHandler}
setChatView={setView}
rmRegistry={props.rmRegistry}
focusInputSignal={props.focusInputSignal}
/>
)}
{view === ChatView.Settings && (
Expand Down
24 changes: 22 additions & 2 deletions packages/jupyter-ai/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,17 @@ import { statusItemPlugin } from './status';
import { IJaiCompletionProvider } from './tokens';
import { IRenderMimeRegistry } from '@jupyterlab/rendermime';
import { ActiveCellManager } from './contexts/active-cell-context';
import { Signal } from '@lumino/signaling';

export type DocumentTracker = IWidgetTracker<IDocumentWidget>;

export namespace CommandIDs {
  /**
   * Command to focus the chat input.
   *
   * Registered via `app.commands.addCommand` in the plugin's activation,
   * and bound to `Accel Shift 1` through `jupyter.lab.shortcuts` in
   * `schema/plugin.json`.
   */
  export const focusChatInput = 'jupyter-ai:focus-chat-input';
}

/**
* Initialization data for the jupyter_ai extension.
*/
Expand Down Expand Up @@ -66,7 +74,9 @@ const plugin: JupyterFrontEndPlugin<void> = {
});
};

let chatWidget: ReactWidget | null = null;
const focusInputSignal = new Signal<unknown, void>({});

let chatWidget: ReactWidget;
try {
await chatHandler.initialize();
chatWidget = buildChatSidebar(
Expand All @@ -77,7 +87,8 @@ const plugin: JupyterFrontEndPlugin<void> = {
rmRegistry,
completionProvider,
openInlineCompleterSettings,
activeCellManager
activeCellManager,
focusInputSignal
);
} catch (e) {
chatWidget = buildErrorWidget(themeManager);
Expand All @@ -91,6 +102,15 @@ const plugin: JupyterFrontEndPlugin<void> = {
if (restorer) {
restorer.add(chatWidget, 'jupyter-ai-chat');
}

// Define jupyter-ai commands
app.commands.addCommand(CommandIDs.focusChatInput, {
execute: () => {
app.shell.activateById(chatWidget.id);
focusInputSignal.emit();
},
label: 'Focus the jupyter-ai chat'
});
}
};

Expand Down
5 changes: 4 additions & 1 deletion packages/jupyter-ai/src/widgets/chat-sidebar.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import React from 'react';
import { ISignal } from '@lumino/signaling';
import { ReactWidget } from '@jupyterlab/apputils';
import type { IThemeManager } from '@jupyterlab/apputils';
import type { Awareness } from 'y-protocols/awareness';
Expand All @@ -19,7 +20,8 @@ export function buildChatSidebar(
rmRegistry: IRenderMimeRegistry,
completionProvider: IJaiCompletionProvider | null,
openInlineCompleterSettings: () => void,
activeCellManager: ActiveCellManager
activeCellManager: ActiveCellManager,
focusInputSignal: ISignal<unknown, void>
): ReactWidget {
const ChatWidget = ReactWidget.create(
<Chat
Expand All @@ -31,6 +33,7 @@ export function buildChatSidebar(
completionProvider={completionProvider}
openInlineCompleterSettings={openInlineCompleterSettings}
activeCellManager={activeCellManager}
focusInputSignal={focusInputSignal}
/>
);
ChatWidget.id = 'jupyter-ai::chat';
Expand Down

0 comments on commit 77dda6c

Please sign in to comment.