Skip to content

Commit

Permalink
Merge branch 'jupyterlab:main' into refactor-init-config
Browse files Browse the repository at this point in the history
  • Loading branch information
andrii-i authored Dec 19, 2023
2 parents 339f911 + 79e345a commit 9c4832a
Show file tree
Hide file tree
Showing 9 changed files with 81 additions and 22 deletions.
5 changes: 5 additions & 0 deletions packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -362,6 +362,11 @@ def __init__(self, **kwargs):
async def _acall(self, *args, **kwargs) -> Coroutine[Any, Any, str]:
    # Async entry point: delegate the provider's blocking `_call` to an
    # executor thread so the event loop is not blocked while generating.
    return await self._call_in_executor(*args, **kwargs)

@property
def allows_concurrency(self):
    """Whether this provider can safely process overlapping chat requests.

    GPT4All models currently fail when handed concurrent messages
    (see issue #481), so concurrency is disabled for this provider.
    """
    return False


HUGGINGFACE_HUB_VALID_TASKS = (
"text2text-generation",
Expand Down
4 changes: 3 additions & 1 deletion packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,11 @@ class AskChatHandler(BaseChatHandler):

id = "ask"
name = "Ask with Local Data"
help = "Asks a question with retrieval augmented generation (RAG)"
help = "Ask a question about your learned data"
routing_type = SlashCommandRoutingType(slash_id="ask")

uses_llm = True

def __init__(self, retriever, *args, **kwargs):
super().__init__(*args, **kwargs)

Expand Down
34 changes: 30 additions & 4 deletions packages/jupyter-ai/jupyter_ai/chat_handlers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,15 @@ class BaseChatHandler:

routing_type: HandlerRoutingType = ...

uses_llm: ClassVar[bool] = True
"""Class attribute specifying whether this chat handler uses the LLM
specified by the config. Subclasses should define this. Should be set to
`False` for handlers like `/help`."""

_requests_count = 0
"""Class attribute set to the number of requests that Jupyternaut is
currently handling."""

def __init__(
self,
log: Logger,
Expand All @@ -82,11 +91,26 @@ def __init__(

async def on_message(self, message: HumanChatMessage):
"""
Method which receives a human message and processes it via
`self.process_message()`, calling `self.handle_exc()` when an exception
is raised. This method is called by RootChatHandler when it routes a
human message to this chat handler.
Method which receives a human message, calls `self.get_llm_chain()`, and
processes the message via `self.process_message()`, calling
`self.handle_exc()` when an exception is raised. This method is called
by RootChatHandler when it routes a human message to this chat handler.
"""

# check whether the configured LLM can support a request at this time.
if self.uses_llm and BaseChatHandler._requests_count > 0:
lm_provider_klass = self.config_manager.lm_provider
lm_provider_params = self.config_manager.lm_provider_params
lm_provider = lm_provider_klass(**lm_provider_params)

if not lm_provider.allows_concurrency:
self.reply(
"The currently selected language model can process only one request at a time. Please wait for me to reply before sending another question.",
message,
)
return

BaseChatHandler._requests_count += 1
try:
await self.process_message(message)
except Exception as e:
Expand All @@ -96,6 +120,8 @@ async def on_message(self, message: HumanChatMessage):
await self.handle_exc(e, message)
except Exception as e:
await self._default_handle_exc(e, message)
finally:
BaseChatHandler._requests_count -= 1

async def process_message(self, message: HumanChatMessage):
"""
Expand Down
4 changes: 3 additions & 1 deletion packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,11 @@
class ClearChatHandler(BaseChatHandler):
id = "clear"
name = "Clear chat messages"
help = "Clears the displayed chat message history only; does not clear the context sent to chat providers"
help = "Clear the chat window"
routing_type = SlashCommandRoutingType(slash_id="clear")

uses_llm = False

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)

Expand Down
2 changes: 2 additions & 0 deletions packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,8 @@ class DefaultChatHandler(BaseChatHandler):
help = "Responds to prompts that are not otherwise handled by a chat handler"
routing_type = SlashCommandRoutingType(slash_id=None)

uses_llm = True

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.memory = ConversationBufferWindowMemory(return_messages=True, k=2)
Expand Down
4 changes: 3 additions & 1 deletion packages/jupyter-ai/jupyter_ai/chat_handlers/generate.py
Original file line number Diff line number Diff line change
Expand Up @@ -218,9 +218,11 @@ def create_notebook(outline):
class GenerateChatHandler(BaseChatHandler):
id = "generate"
name = "Generate Notebook"
help = "Generates a Jupyter notebook, including name, outline, and section contents"
help = "Generate a Jupyter notebook from a text prompt"
routing_type = SlashCommandRoutingType(slash_id="generate")

uses_llm = True

def __init__(self, preferred_dir: str, log_dir: Optional[str], *args, **kwargs):
super().__init__(*args, **kwargs)
self.log_dir = Path(log_dir) if log_dir else None
Expand Down
32 changes: 21 additions & 11 deletions packages/jupyter-ai/jupyter_ai/chat_handlers/help.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import time
from typing import List
from typing import Dict
from uuid import uuid4

from jupyter_ai.models import AgentChatMessage, HumanChatMessage
Expand All @@ -8,34 +8,44 @@

HELP_MESSAGE = """Hi there! I'm Jupyternaut, your programming assistant.
You can ask me a question using the text box below. You can also use these commands:
* `/learn` — Teach Jupyternaut about files on your system
* `/ask` — Ask a question about your learned data
* `/generate` — Generate a Jupyter notebook from a text prompt
* `/clear` — Clear the chat window
* `/help` — Display this help message
{commands}
Jupyter AI includes [magic commands](https://jupyter-ai.readthedocs.io/en/latest/users/index.html#the-ai-and-ai-magic-commands) that you can use in your notebooks.
For more information, see the [documentation](https://jupyter-ai.readthedocs.io).
"""


def HelpMessage():
def _format_help_message(chat_handlers: Dict[str, BaseChatHandler]):
    """Render the /help message body from the registered chat handlers.

    Produces one markdown bullet per slash command, using each handler's
    `help` attribute as the description. The "default" handler is skipped
    because it has no slash command of its own.
    """
    bullets = []
    for slash_id, handler in chat_handlers.items():
        if slash_id == "default":
            continue
        bullets.append(f"* `{slash_id}` — {handler.help}")
    return HELP_MESSAGE.format(commands="\n".join(bullets))


def HelpMessage(chat_handlers: Dict[str, BaseChatHandler]):
    """Build the agent chat message shown for /help.

    The span as rendered contained both the pre- and post-commit `body=`
    keyword arguments (a duplicate-keyword SyntaxError); only the
    post-commit `body=_format_help_message(chat_handlers)` is kept.

    :param chat_handlers: mapping of slash command name to its handler,
        used to enumerate the available commands in the help text.
    :returns: an `AgentChatMessage` with a fresh id and timestamp.
    """
    return AgentChatMessage(
        id=uuid4().hex,
        time=time.time(),
        body=_format_help_message(chat_handlers),
        reply_to="",
    )


class HelpChatHandler(BaseChatHandler):
    """Chat handler for the /help slash command: replies with the list of
    available commands.

    The span as rendered interleaved pre- and post-commit lines (duplicate
    `help =` assignment, two `__init__` signatures, and both the old
    `HELP_MESSAGE` reply and the new `_format_help_message` reply); this is
    the reconstructed post-commit class.
    """

    id = "help"
    name = "Help"
    help = "Display this help message"
    routing_type = SlashCommandRoutingType(slash_id="help")

    # /help never invokes the language model, so it is exempt from the
    # provider concurrency guard in BaseChatHandler.on_message().
    uses_llm = False

    def __init__(self, *args, chat_handlers: Dict[str, BaseChatHandler], **kwargs):
        super().__init__(*args, **kwargs)
        # Keep a reference to the full handler registry so the help text
        # always reflects the commands registered at startup.
        self._chat_handlers = chat_handlers

    async def process_message(self, message: HumanChatMessage):
        self.reply(_format_help_message(self._chat_handlers), message)
4 changes: 3 additions & 1 deletion packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,11 @@
class LearnChatHandler(BaseChatHandler):
id = "learn"
name = "Learn Local Data"
help = "Pass a list of files and directories. Once converted to vector format, you can ask about them with /ask."
help = "Teach Jupyternaut about files on your system"
routing_type = SlashCommandRoutingType(slash_id="learn")

uses_llm = True

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.parser.prog = "/learn"
Expand Down
14 changes: 11 additions & 3 deletions packages/jupyter-ai/jupyter_ai/extension.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ def initialize_settings(self):
# list of chat messages to broadcast to new clients
# this is only used to render the UI, and is not the conversational
# memory object used by the LM chain.
self.settings["chat_history"] = [HelpMessage()]
self.settings["chat_history"] = []

# get reference to event loop
# `asyncio.get_event_loop()` is deprecated in Python 3.11+, in favor of
Expand Down Expand Up @@ -189,7 +189,6 @@ def initialize_settings(self):
log_dir=self.error_logs_dir,
)
learn_chat_handler = LearnChatHandler(**chat_handler_kwargs)
help_chat_handler = HelpChatHandler(**chat_handler_kwargs)
retriever = Retriever(learn_chat_handler=learn_chat_handler)
ask_chat_handler = AskChatHandler(**chat_handler_kwargs, retriever=retriever)

Expand All @@ -199,9 +198,12 @@ def initialize_settings(self):
"/clear": clear_chat_handler,
"/generate": generate_chat_handler,
"/learn": learn_chat_handler,
"/help": help_chat_handler,
}

help_chat_handler = HelpChatHandler(
**chat_handler_kwargs, chat_handlers=jai_chat_handlers
)

slash_command_pattern = r"^[a-zA-Z0-9_]+$"
for chat_handler_ep in chat_handler_eps:
try:
Expand Down Expand Up @@ -248,6 +250,12 @@ def initialize_settings(self):
f"Registered chat handler `{chat_handler.id}` with command `{command_name}`."
)

# Make help always appear as the last command
jai_chat_handlers["/help"] = help_chat_handler

self.settings["chat_history"].append(
HelpMessage(chat_handlers=jai_chat_handlers)
)
self.settings["jai_chat_handlers"] = jai_chat_handlers

latency_ms = round((time.time() - start) * 1000)
Expand Down

0 comments on commit 9c4832a

Please sign in to comment.