diff --git a/docs/source/_static/chat-icon-left-tab-bar-custom.png b/docs/source/_static/chat-icon-left-tab-bar-custom.png
new file mode 100644
index 000000000..b9ec3e71d
Binary files /dev/null and b/docs/source/_static/chat-icon-left-tab-bar-custom.png differ
diff --git a/docs/source/_static/chat-icon-left-tab-bar.png b/docs/source/_static/chat-icon-left-tab-bar.png
index 4ad54b6c4..07f21b52e 100644
Binary files a/docs/source/_static/chat-icon-left-tab-bar.png and b/docs/source/_static/chat-icon-left-tab-bar.png differ
diff --git a/docs/source/users/index.md b/docs/source/users/index.md
index a57afbde7..ce1ea6d46 100644
--- a/docs/source/users/index.md
+++ b/docs/source/users/index.md
@@ -245,10 +245,31 @@ Before you can use the chat interface, you need to provide your API keys for the
alt="Screen shot of the setup interface, showing model selections and key populated"
class="screenshot" />
-Once you have set all the necessary keys, click the "back" (left arrow) button in the upper-left corner of the Jupyter AI side panel. The chat interface now appears, and you can ask a question using the message box at the bottom.
+Once you have set all the necessary keys, click the "back" (left arrow) button in the upper-left corner of the Jupyter AI side panel. The chat interface now appears, with a help menu of available `/` (slash) commands, and you can ask a question using the message box at the bottom.
+
+You may customize the help message template shown in the chat interface, replacing the default one. The steps are as follows:
+1. Create a new `config.py` file in your current directory with the contents you want to see in the help message by editing the template below:
+```
+c.AiExtension.help_message_template = """
+Sup. I'm {persona_name}. This is a sassy custom help message.
+
+Here's the slash commands you can use. Use 'em or don't... I don't care.
+
+{slash_commands_list}
+""".strip()
+```
+2. Start JupyterLab with the following command:
+```
+jupyter lab --config=config.py
+```
+The new help message will be used instead of the default one, as shown below.
+
+
To compose a message, type it in the text box at the bottom of the chat interface and press ENTER to send it. You can press SHIFT+ENTER to add a new line. (These are the default keybindings; you can change them in the chat settings pane.) Once you have sent a message, you should see a response from Jupyternaut, the Jupyter AI chatbot.
diff --git a/packages/jupyter-ai-test/jupyter_ai_test/test_providers.py b/packages/jupyter-ai-test/jupyter_ai_test/test_providers.py
index f2803deec..1ec042afb 100644
--- a/packages/jupyter-ai-test/jupyter_ai_test/test_providers.py
+++ b/packages/jupyter-ai-test/jupyter_ai_test/test_providers.py
@@ -75,3 +75,41 @@ class TestProviderWithStreaming(BaseProvider, TestLLMWithStreaming):
fields: ClassVar[List[Field]] = []
"""User inputs expected by this provider when initializing it. Each `Field` `f`
should be passed in the constructor as a keyword argument, keyed by `f.key`."""
+
+
+class TestProviderAskLearnUnsupported(BaseProvider, TestLLMWithStreaming):
+ id: ClassVar[str] = "test-provider-ask-learn-unsupported"
+ """ID for this provider class."""
+
+ name: ClassVar[str] = "Test Provider (/learn and /ask unsupported)"
+ """User-facing name of this provider."""
+
+ models: ClassVar[List[str]] = ["test"]
+ """List of supported models by their IDs. For registry providers, this will
+ be just ["*"]."""
+
+ help: ClassVar[str] = None
+ """Text to display in lieu of a model list for a registry provider that does
+ not provide a list of models."""
+
+ model_id_key: ClassVar[str] = "model_id"
+ """Kwarg expected by the upstream LangChain provider."""
+
+ model_id_label: ClassVar[str] = "Model ID"
+ """Human-readable label of the model ID."""
+
+ pypi_package_deps: ClassVar[List[str]] = []
+ """List of PyPi package dependencies."""
+
+ auth_strategy: ClassVar[AuthStrategy] = None
+ """Authentication/authorization strategy. Declares what credentials are
+ required to use this model provider. Generally should not be `None`."""
+
+ registry: ClassVar[bool] = False
+ """Whether this provider is a registry provider."""
+
+ fields: ClassVar[List[Field]] = []
+ """User inputs expected by this provider when initializing it. Each `Field` `f`
+ should be passed in the constructor as a keyword argument, keyed by `f.key`."""
+
+ unsupported_slash_commands = {"/learn", "/ask"}
diff --git a/packages/jupyter-ai-test/pyproject.toml b/packages/jupyter-ai-test/pyproject.toml
index c50c520d1..eaecc09d3 100644
--- a/packages/jupyter-ai-test/pyproject.toml
+++ b/packages/jupyter-ai-test/pyproject.toml
@@ -31,6 +31,7 @@ test = ["coverage", "pytest", "pytest-asyncio", "pytest-cov"]
[project.entry-points."jupyter_ai.model_providers"]
test-provider = "jupyter_ai_test.test_providers:TestProvider"
test-provider-with-streaming = "jupyter_ai_test.test_providers:TestProviderWithStreaming"
+test-provider-ask-learn-unsupported = "jupyter_ai_test.test_providers:TestProviderAskLearnUnsupported"
[project.entry-points."jupyter_ai.chat_handlers"]
test-slash-command = "jupyter_ai_test.test_slash_commands:TestSlashCommand"
diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/base.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/base.py
index fbb4cdb31..83aaef4f0 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/base.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/base.py
@@ -90,7 +90,7 @@ class BaseChatHandler:
"""What this chat handler does, which third-party models it contacts,
the data it returns to the user, and so on, for display in the UI."""
- routing_type: HandlerRoutingType = ...
+ routing_type: ClassVar[HandlerRoutingType] = ...
uses_llm: ClassVar[bool] = True
"""Class attribute specifying whether this chat handler uses the LLM
@@ -102,10 +102,20 @@ class BaseChatHandler:
parse the arguments and display help when user queries with
`-h` or `--help`"""
- _requests_count = 0
+ _requests_count: ClassVar[int] = 0
"""Class attribute set to the number of requests that Jupyternaut is
currently handling."""
+ # Instance attributes
+ help_message_template: str
+ """Format string template that is used to build the help message. Specified
+ from traitlets configuration."""
+
+ chat_handlers: Dict[str, "BaseChatHandler"]
+ """Dictionary of chat handlers. Allows one chat handler to reference other
+ chat handlers, which is necessary for some use-cases like printing the help
+ message."""
+
def __init__(
self,
log: Logger,
@@ -116,6 +126,8 @@ def __init__(
root_dir: str,
preferred_dir: Optional[str],
dask_client_future: Awaitable[DaskClient],
+ help_message_template: str,
+ chat_handlers: Dict[str, "BaseChatHandler"],
):
self.log = log
self.config_manager = config_manager
@@ -133,6 +145,9 @@ def __init__(
self.root_dir = os.path.abspath(os.path.expanduser(root_dir))
self.preferred_dir = get_preferred_dir(self.root_dir, preferred_dir)
self.dask_client_future = dask_client_future
+ self.help_message_template = help_message_template
+ self.chat_handlers = chat_handlers
+
self.llm = None
self.llm_params = None
self.llm_chain = None
@@ -366,3 +381,37 @@ def output_dir(self) -> str:
return self.preferred_dir
else:
return self.root_dir
+
+ def send_help_message(self, human_msg: Optional[HumanChatMessage] = None) -> None:
+ """Sends a help message to all connected clients."""
+ lm_provider = self.config_manager.lm_provider
+ unsupported_slash_commands = (
+ lm_provider.unsupported_slash_commands if lm_provider else set()
+ )
+ chat_handlers = self.chat_handlers
+ slash_commands = {k: v for k, v in chat_handlers.items() if k != "default"}
+ for key in unsupported_slash_commands:
+ del slash_commands[key]
+
+ # markdown string that lists the slash commands
+ slash_commands_list = "\n".join(
+ [
+ f"* `{command_name}` — {handler.help}"
+ for command_name, handler in slash_commands.items()
+ ]
+ )
+
+ help_message_body = self.help_message_template.format(
+ persona_name=self.persona.name, slash_commands_list=slash_commands_list
+ )
+ help_message = AgentChatMessage(
+ id=uuid4().hex,
+ time=time.time(),
+ body=help_message_body,
+ reply_to=human_msg.id if human_msg else "",
+ persona=self.persona,
+ )
+
+ self._chat_history.append(help_message)
+ for websocket in self._root_chat_handlers.values():
+ websocket.write_message(help_message.json())
diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py
index 97cae4ab4..2d4252256 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py
@@ -1,7 +1,4 @@
-from typing import List
-
-from jupyter_ai.chat_handlers.help import build_help_message
-from jupyter_ai.models import ChatMessage, ClearMessage
+from jupyter_ai.models import ClearMessage
from .base import BaseChatHandler, SlashCommandRoutingType
@@ -20,22 +17,14 @@ def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
async def process_message(self, _):
+ # Clear chat
for handler in self._root_chat_handlers.values():
if not handler:
continue
- # Clear chat
handler.broadcast_message(ClearMessage())
self._chat_history.clear()
-
- # Build /help message and reinstate it in chat
- chat_handlers = handler.chat_handlers
- persona = self.config_manager.persona
- lm_provider = self.config_manager.lm_provider
- unsupported_slash_commands = (
- lm_provider.unsupported_slash_commands if lm_provider else set()
- )
- msg = build_help_message(chat_handlers, persona, unsupported_slash_commands)
- self.reply(msg.body)
-
break
+
+ # re-send help message
+ self.send_help_message()
diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/help.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/help.py
index 0e82be13d..cd8556863 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/help.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/help.py
@@ -1,53 +1,7 @@
-import time
-from typing import Dict
-from uuid import uuid4
-
-from jupyter_ai.models import AgentChatMessage, HumanChatMessage
-from jupyter_ai_magics import Persona
+from jupyter_ai.models import HumanChatMessage
from .base import BaseChatHandler, SlashCommandRoutingType
-HELP_MESSAGE = """Hi there! I'm {persona_name}, your programming assistant.
-You can ask me a question using the text box below. You can also use these commands:
-{commands}
-
-Jupyter AI includes [magic commands](https://jupyter-ai.readthedocs.io/en/latest/users/index.html#the-ai-and-ai-magic-commands) that you can use in your notebooks.
-For more information, see the [documentation](https://jupyter-ai.readthedocs.io).
-"""
-
-
-def _format_help_message(
- chat_handlers: Dict[str, BaseChatHandler],
- persona: Persona,
- unsupported_slash_commands: set,
-):
- if unsupported_slash_commands:
- keys = set(chat_handlers.keys()) - unsupported_slash_commands
- chat_handlers = {key: chat_handlers[key] for key in keys}
-
- commands = "\n".join(
- [
- f"* `{command_name}` — {handler.help}"
- for command_name, handler in chat_handlers.items()
- if command_name != "default"
- ]
- )
- return HELP_MESSAGE.format(commands=commands, persona_name=persona.name)
-
-
-def build_help_message(
- chat_handlers: Dict[str, BaseChatHandler],
- persona: Persona,
- unsupported_slash_commands: set,
-):
- return AgentChatMessage(
- id=uuid4().hex,
- time=time.time(),
- body=_format_help_message(chat_handlers, persona, unsupported_slash_commands),
- reply_to="",
- persona=Persona(name=persona.name, avatar_route=persona.avatar_route),
- )
-
class HelpChatHandler(BaseChatHandler):
id = "help"
@@ -58,19 +12,8 @@ class HelpChatHandler(BaseChatHandler):
uses_llm = False
- def __init__(self, *args, chat_handlers: Dict[str, BaseChatHandler], **kwargs):
+ def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self._chat_handlers = chat_handlers
async def process_message(self, message: HumanChatMessage):
- persona = self.config_manager.persona
- lm_provider = self.config_manager.lm_provider
- unsupported_slash_commands = (
- lm_provider.unsupported_slash_commands if lm_provider else set()
- )
- self.reply(
- _format_help_message(
- self._chat_handlers, persona, unsupported_slash_commands
- ),
- message,
- )
+ self.send_help_message(message)
diff --git a/packages/jupyter-ai/jupyter_ai/extension.py b/packages/jupyter-ai/jupyter_ai/extension.py
index 9aab5c5ba..e89f6bdb4 100644
--- a/packages/jupyter-ai/jupyter_ai/extension.py
+++ b/packages/jupyter-ai/jupyter_ai/extension.py
@@ -22,7 +22,6 @@
HelpChatHandler,
LearnChatHandler,
)
-from .chat_handlers.help import build_help_message
from .completions.handlers import DefaultInlineCompletionHandler
from .config_manager import ConfigManager
from .handlers import (
@@ -41,6 +40,15 @@
)
+DEFAULT_HELP_MESSAGE_TEMPLATE = """Hi there! I'm {persona_name}, your programming assistant.
+You can ask me a question using the text box below. You can also use these commands:
+{slash_commands_list}
+
+Jupyter AI includes [magic commands](https://jupyter-ai.readthedocs.io/en/latest/users/index.html#the-ai-and-ai-magic-commands) that you can use in your notebooks.
+For more information, see the [documentation](https://jupyter-ai.readthedocs.io).
+"""
+
+
class AiExtension(ExtensionApp):
name = "jupyter_ai"
handlers = [
@@ -158,6 +166,23 @@ class AiExtension(ExtensionApp):
config=True,
)
+ help_message_template = Unicode(
+ default_value=DEFAULT_HELP_MESSAGE_TEMPLATE,
+ help="""
+ A format string accepted by `str.format()`, which is used to generate a
+ dynamic help message. The format string should contain exactly two
+ named replacement fields: `persona_name` and `slash_commands_list`.
+
+ - `persona_name`: String containing the name of the persona, which is
+ defined by the configured language model. Usually defaults to
+ 'Jupyternaut'.
+
+ - `slash_commands_list`: A string containing a bulleted list of the
+ slash commands available to the configured language model.
+ """,
+ config=True,
+ )
+
def initialize_settings(self):
start = time.time()
@@ -240,22 +265,21 @@ def initialize_settings(self):
eps = entry_points()
- common_handler_kargs = {
- "log": self.log,
- "config_manager": self.settings["jai_config_manager"],
- "model_parameters": self.settings["model_parameters"],
- }
-
# initialize chat handlers
chat_handler_eps = eps.select(group="jupyter_ai.chat_handlers")
+ chat_handlers = {}
chat_handler_kwargs = {
- **common_handler_kargs,
+ "log": self.log,
+ "config_manager": self.settings["jai_config_manager"],
+ "model_parameters": self.settings["model_parameters"],
"root_chat_handlers": self.settings["jai_root_chat_handlers"],
"chat_history": self.settings["chat_history"],
"root_dir": self.serverapp.root_dir,
"dask_client_future": self.settings["dask_client_future"],
"model_parameters": self.settings["model_parameters"],
"preferred_dir": self.serverapp.contents_manager.preferred_dir,
+ "help_message_template": self.help_message_template,
+ "chat_handlers": chat_handlers,
}
default_chat_handler = DefaultChatHandler(**chat_handler_kwargs)
clear_chat_handler = ClearChatHandler(**chat_handler_kwargs)
@@ -271,19 +295,13 @@ def initialize_settings(self):
fix_chat_handler = FixChatHandler(**chat_handler_kwargs)
- jai_chat_handlers = {
- "default": default_chat_handler,
- "/ask": ask_chat_handler,
- "/clear": clear_chat_handler,
- "/generate": generate_chat_handler,
- "/learn": learn_chat_handler,
- "/export": export_chat_handler,
- "/fix": fix_chat_handler,
- }
-
- help_chat_handler = HelpChatHandler(
- **chat_handler_kwargs, chat_handlers=jai_chat_handlers
- )
+ chat_handlers["default"] = default_chat_handler
+ chat_handlers["/ask"] = ask_chat_handler
+ chat_handlers["/clear"] = clear_chat_handler
+ chat_handlers["/generate"] = generate_chat_handler
+ chat_handlers["/learn"] = learn_chat_handler
+ chat_handlers["/export"] = export_chat_handler
+ chat_handlers["/fix"] = fix_chat_handler
slash_command_pattern = r"^[a-zA-Z0-9_]+$"
for chat_handler_ep in chat_handler_eps:
@@ -319,23 +337,23 @@ def initialize_settings(self):
)
continue
- if command_name in jai_chat_handlers:
+ if command_name in chat_handlers:
self.log.error(
f"Unable to register chat handler `{chat_handler.id}` because command `{command_name}` already has a handler"
)
continue
# The entry point is a class; we need to instantiate the class to send messages to it
- jai_chat_handlers[command_name] = chat_handler(**chat_handler_kwargs)
+ chat_handlers[command_name] = chat_handler(**chat_handler_kwargs)
self.log.info(
f"Registered chat handler `{chat_handler.id}` with command `{command_name}`."
)
# Make help always appear as the last command
- jai_chat_handlers["/help"] = help_chat_handler
+ chat_handlers["/help"] = HelpChatHandler(**chat_handler_kwargs)
# bind chat handlers to settings
- self.settings["jai_chat_handlers"] = jai_chat_handlers
+ self.settings["jai_chat_handlers"] = chat_handlers
# show help message at server start
self._show_help_message()
@@ -348,21 +366,13 @@ def _show_help_message(self):
Method that ensures a dynamically-generated help message is included in
the chat history shown to users.
"""
- chat_handlers = self.settings["jai_chat_handlers"]
- config_manager: ConfigManager = self.settings["jai_config_manager"]
- lm_provider = config_manager.lm_provider
-
- if not lm_provider:
- return
-
- persona = config_manager.persona
- unsupported_slash_commands = (
- lm_provider.unsupported_slash_commands if lm_provider else set()
- )
- help_message = build_help_message(
- chat_handlers, persona, unsupported_slash_commands
- )
- self.settings["chat_history"].append(help_message)
+ # call `send_help_message()` on any instance of `BaseChatHandler`. The
+ # `default` chat handler should always exist, so we reference that
+ # object when calling `send_help_message()`.
+ default_chat_handler: DefaultChatHandler = self.settings["jai_chat_handlers"][
+ "default"
+ ]
+ default_chat_handler.send_help_message()
async def _get_dask_client(self):
return DaskClient(processes=False, asynchronous=True)
diff --git a/packages/jupyter-ai/jupyter_ai/tests/test_handlers.py b/packages/jupyter-ai/jupyter_ai/tests/test_handlers.py
index d2e73ce6c..d0ccbe552 100644
--- a/packages/jupyter-ai/jupyter_ai/tests/test_handlers.py
+++ b/packages/jupyter-ai/jupyter_ai/tests/test_handlers.py
@@ -7,10 +7,9 @@
import pytest
from jupyter_ai.chat_handlers import DefaultChatHandler, learn
from jupyter_ai.config_manager import ConfigManager
+from jupyter_ai.extension import DEFAULT_HELP_MESSAGE_TEMPLATE
from jupyter_ai.handlers import RootChatHandler
from jupyter_ai.models import (
- AgentStreamChunkMessage,
- AgentStreamMessage,
ChatClient,
ClosePendingMessage,
HumanChatMessage,
@@ -73,6 +72,8 @@ def broadcast_message(message: Message) -> None:
root_dir="",
preferred_dir="",
dask_client_future=None,
+ help_message_template=DEFAULT_HELP_MESSAGE_TEMPLATE,
+ chat_handlers={},
)