diff --git a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
index 33d220f94..9d843fe4d 100644
--- a/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
+++ b/packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
@@ -362,6 +362,11 @@ def __init__(self, **kwargs):
     async def _acall(self, *args, **kwargs) -> Coroutine[Any, Any, str]:
         return await self._call_in_executor(*args, **kwargs)
 
+    @property
+    def allows_concurrency(self):
+        # At present, GPT4All providers fail with concurrent messages. See #481.
+        return False
+
 
 HUGGINGFACE_HUB_VALID_TASKS = (
     "text2text-generation",
diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py
index 5716ddd9b..8fe5c7f3a 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py
@@ -31,6 +31,8 @@ class AskChatHandler(BaseChatHandler):
     help = "Ask a question about your learned data"
     routing_type = SlashCommandRoutingType(slash_id="ask")
 
+    uses_llm = True
+
     def __init__(self, retriever, *args, **kwargs):
         super().__init__(*args, **kwargs)
 
diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/base.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/base.py
index 15aef6788..fb8a5e3de 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/base.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/base.py
@@ -58,6 +58,15 @@ class BaseChatHandler:
 
     routing_type: HandlerRoutingType = ...
 
+    uses_llm: ClassVar[bool] = True
+    """Class attribute specifying whether this chat handler uses the LLM
+    specified by the config. Subclasses should define this. Should be set to
+    `False` for handlers like `/help`."""
+
+    _requests_count = 0
+    """Class attribute set to the number of requests that Jupyternaut is
+    currently handling."""
+
     def __init__(
         self,
         log: Logger,
@@ -82,11 +91,26 @@ def __init__(
 
     async def on_message(self, message: HumanChatMessage):
         """
-        Method which receives a human message and processes it via
-        `self.process_message()`, calling `self.handle_exc()` when an exception
-        is raised. This method is called by RootChatHandler when it routes a
-        human message to this chat handler.
+        Method which receives a human message, calls `self.get_llm_chain()`, and
+        processes the message via `self.process_message()`, calling
+        `self.handle_exc()` when an exception is raised. This method is called
+        by RootChatHandler when it routes a human message to this chat handler.
         """
+
+        # check whether the configured LLM can support a request at this time.
+        if self.uses_llm and BaseChatHandler._requests_count > 0:
+            lm_provider_klass = self.config_manager.lm_provider
+            lm_provider_params = self.config_manager.lm_provider_params
+            lm_provider = lm_provider_klass(**lm_provider_params)
+
+            if not lm_provider.allows_concurrency:
+                self.reply(
+                    "The currently selected language model can process only one request at a time. Please wait for me to reply before sending another question.",
+                    message,
+                )
+                return
+
+        BaseChatHandler._requests_count += 1
         try:
             await self.process_message(message)
         except Exception as e:
@@ -96,6 +120,8 @@
             await self.handle_exc(e, message)
         except Exception as e:
             await self._default_handle_exc(e, message)
+        finally:
+            BaseChatHandler._requests_count -= 1
 
     async def process_message(self, message: HumanChatMessage):
         """
diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py
index 73db24a80..0555abfca 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py
@@ -11,6 +11,8 @@ class ClearChatHandler(BaseChatHandler):
     help = "Clear the chat window"
     routing_type = SlashCommandRoutingType(slash_id="clear")
 
+    uses_llm = False
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
 
diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
index 5bd839ca5..3a76fba44 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
@@ -37,6 +37,8 @@ class DefaultChatHandler(BaseChatHandler):
     help = "Responds to prompts that are not otherwise handled by a chat handler"
     routing_type = SlashCommandRoutingType(slash_id=None)
 
+    uses_llm = True
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.memory = ConversationBufferWindowMemory(return_messages=True, k=2)
diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/generate.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/generate.py
index 1f463dd9e..50ef6b02c 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/generate.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/generate.py
@@ -221,6 +221,8 @@ class GenerateChatHandler(BaseChatHandler):
     help = "Generate a Jupyter notebook from a text prompt"
     routing_type = SlashCommandRoutingType(slash_id="generate")
 
+    uses_llm = True
+
     def __init__(self, preferred_dir: str, log_dir: Optional[str], *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.log_dir = Path(log_dir) if log_dir else None
diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/help.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/help.py
index 3735a7c5e..ebb8f0383 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/help.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/help.py
@@ -41,6 +41,8 @@ class HelpChatHandler(BaseChatHandler):
     help = "Display this help message"
     routing_type = SlashCommandRoutingType(slash_id="help")
 
+    uses_llm = False
+
     def __init__(self, *args, chat_handlers: Dict[str, BaseChatHandler], **kwargs):
         super().__init__(*args, **kwargs)
         self._chat_handlers = chat_handlers
diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py
index a36885297..3f67c07d2 100644
--- a/packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py
+++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py
@@ -36,6 +36,8 @@ class LearnChatHandler(BaseChatHandler):
     help = "Teach Jupyternaut about files on your system"
     routing_type = SlashCommandRoutingType(slash_id="learn")
 
+    uses_llm = True
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.parser.prog = "/learn"