Log exceptions in /generate to a file #431

Merged · 8 commits · Nov 6, 2023
packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py (1 addition & 1 deletion)
@@ -48,7 +48,7 @@ def create_llm_chain(
             verbose=False,
         )

-    async def _process_message(self, message: HumanChatMessage):
+    async def process_message(self, message: HumanChatMessage):
         args = self.parse_args(message)
         if args is None:
             return
packages/jupyter-ai/jupyter_ai/chat_handlers/base.py (43 additions & 10 deletions)
@@ -9,7 +9,6 @@
 from jupyter_ai.config_manager import ConfigManager, Logger
 from jupyter_ai.models import AgentChatMessage, HumanChatMessage
 from jupyter_ai_magics.providers import BaseProvider
-from langchain.chat_models.base import BaseChatModel

 if TYPE_CHECKING:
     from jupyter_ai.handlers import RootChatHandler
@@ -33,20 +32,54 @@ def __init__(
         self.llm_params = None
         self.llm_chain = None

-    async def process_message(self, message: HumanChatMessage):
-        """Processes the message passed by the root chat handler."""
+    async def on_message(self, message: HumanChatMessage):
+        """
+        Method which receives a human message and processes it via
+        `self.process_message()`, calling `self.handle_exc()` when an exception
+        is raised. This method is called by RootChatHandler when it routes a
+        human message to this chat handler.
+        """
         try:
-            await self._process_message(message)
+            await self.process_message(message)
         except Exception as e:
-            formatted_e = traceback.format_exc()
-            response = f"Sorry, something went wrong and I wasn't able to index that path.\n\n```\n{formatted_e}\n```"
-            self.reply(response, message)
+            try:
+                # we try/except `handle_exc()` in case it was overridden and
+                # raises an exception by accident.
+                await self.handle_exc(e, message)
+            except Exception as e:
+                await self._default_handle_exc(e, message)

-    async def _process_message(self, message: HumanChatMessage):
-        """Processes the message passed by the `Router`"""
+    async def process_message(self, message: HumanChatMessage):
+        """
+        Processes a human message routed to this chat handler. Chat handlers
+        (subclasses) must implement this method. Don't forget to call
+        `self.reply(<response>, message)` at the end!
+
+        The method definition does not need to be wrapped in a try/except block;
+        any exceptions raised here are caught by `self.handle_exc()`.
+        """
         raise NotImplementedError("Should be implemented by subclasses.")

-    def reply(self, response, human_msg: Optional[HumanChatMessage] = None):
+    async def handle_exc(self, e: Exception, message: HumanChatMessage):
+        """
+        Handles an exception raised by `self.process_message()`. A default
+        implementation is provided; however, chat handlers (subclasses) should
+        override this method to provide a more helpful error response.
+        """
+        await self._default_handle_exc(e, message)
+
+    async def _default_handle_exc(self, e: Exception, message: HumanChatMessage):
+        """
+        The default implementation of `handle_exc()`, also used as the fallback
+        when an overridden `handle_exc()` itself raises.
+        """
+        formatted_e = traceback.format_exc()
+        response = (
+            f"Sorry, an error occurred. Details below:\n\n```\n{formatted_e}\n```"
+        )
+        self.reply(response, message)
+
+    def reply(self, response: str, human_msg: Optional[HumanChatMessage] = None):
         agent_msg = AgentChatMessage(
             id=uuid4().hex,
             time=time.time(),
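For readers skimming the diff, here is a minimal self-contained sketch of the contract `base.py` now defines. The `EchoChatHandler` name, the plain-string messages, and the `print`-based `reply` are illustrative stand-ins invented for this example; real handlers subclass `BaseChatHandler` and work with `HumanChatMessage` objects. Keeping the try/except in `on_message()` means subclasses stay free of error-handling boilerplate.

```python
import asyncio
import traceback


class EchoChatHandler:
    """Toy handler mirroring the on_message / process_message / handle_exc contract."""

    async def on_message(self, message: str):
        # Route any exception from process_message() into handle_exc(),
        # falling back to the default handler if handle_exc() itself raises.
        try:
            await self.process_message(message)
        except Exception as e:
            try:
                await self.handle_exc(e, message)
            except Exception as e:
                await self._default_handle_exc(e, message)

    async def process_message(self, message: str):
        # Subclass responsibility: do the work, then reply. No try/except needed.
        if not message:
            raise ValueError("empty message")
        self.reply(f"echo: {message}", message)

    async def handle_exc(self, e: Exception, message: str):
        # Optional override: send a friendlier, command-specific error reply.
        self.reply(f"Sorry, /echo failed: {e}", message)

    async def _default_handle_exc(self, e: Exception, message: str):
        self.reply(f"Sorry, an error occurred:\n{traceback.format_exc()}", message)

    def reply(self, response: str, message: str):
        print(response)


# The empty message makes process_message() raise; on_message() converts the
# exception into the handle_exc() reply instead of crashing the router.
asyncio.run(EchoChatHandler().on_message(""))
```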
packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py (1 addition & 1 deletion)
@@ -10,7 +10,7 @@ def __init__(self, chat_history: List[ChatMessage], *args, **kwargs):
         super().__init__(*args, **kwargs)
         self._chat_history = chat_history

-    async def _process_message(self, _):
+    async def process_message(self, _):
         self._chat_history.clear()
         for handler in self._root_chat_handlers.values():
             if not handler:
packages/jupyter-ai/jupyter_ai/chat_handlers/default.py (1 addition & 1 deletion)
@@ -82,7 +82,7 @@ def clear_memory(self):
         if self.chat_history:
             self.chat_history.clear()

-    async def _process_message(self, message: HumanChatMessage):
+    async def process_message(self, message: HumanChatMessage):
         self.get_llm_chain()
         response = await self.llm_chain.apredict(input=message.body, stop=["\nHuman:"])
         self.reply(response, message)
packages/jupyter-ai/jupyter_ai/chat_handlers/generate.py (17 additions & 9 deletions)
@@ -1,5 +1,8 @@
 import asyncio
 import os
+import time
+import traceback
+from pathlib import Path
 from typing import Dict, List, Optional, Type

 import nbformat
@@ -248,18 +251,23 @@ async def _generate_notebook(self, prompt: str):
         nbformat.write(notebook, final_path)
         return final_path

-    async def _process_message(self, message: HumanChatMessage):
+    async def process_message(self, message: HumanChatMessage):
         self.get_llm_chain()

         # first send a verification message to user
         response = "👍 Great, I will get started on your notebook. It may take a few minutes, but I will reply here when the notebook is ready. In the meantime, you can continue to ask me other questions."
         self.reply(response, message)

-        try:
-            final_path = await self._generate_notebook(prompt=message.body)
-            response = f"""🎉 I have created your notebook and saved it to the location {final_path}. I am still learning how to create notebooks, so please review all code before running it."""
-        except Exception as e:
-            self.log.exception(e)
-            response = "An error occurred while generating the notebook. Try running the /generate task again."
-        finally:
-            self.reply(response, message)
+        final_path = await self._generate_notebook(prompt=message.body)
+        response = f"""🎉 I have created your notebook and saved it to the location {final_path}. I am still learning how to create notebooks, so please review all code before running it."""
+        self.reply(response, message)
+
+    async def handle_exc(self, e: Exception, message: HumanChatMessage):
+        timestamp = time.strftime("%Y-%m-%d-%H.%M.%S")
+        log_path = Path(f"jupyter-ai-logs/generate-{timestamp}.log")
+        log_path.parent.mkdir(parents=True, exist_ok=True)
+        with log_path.open("w") as log:
+            traceback.print_exc(file=log)
+
+        response = f"An error occurred while generating the notebook. The error details have been saved to `./{log_path}`.\n\nTry running `/generate` again, as some language models require multiple attempts before a notebook is generated."
+        self.reply(response, message)
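The new `handle_exc()` above writes the traceback to a timestamped file instead of dumping it into the chat. Below is a minimal sketch of that pattern in isolation; the `write_error_log()` helper and the `RuntimeError` are hypothetical, made up for this example:

```python
import time
import traceback
from pathlib import Path


def write_error_log(prefix: str = "generate") -> Path:
    """Write the active exception's traceback to a timestamped log file."""
    timestamp = time.strftime("%Y-%m-%d-%H.%M.%S")
    log_path = Path(f"jupyter-ai-logs/{prefix}-{timestamp}.log")
    log_path.parent.mkdir(parents=True, exist_ok=True)  # create ./jupyter-ai-logs/
    with log_path.open("w") as log:
        traceback.print_exc(file=log)
    return log_path


try:
    raise RuntimeError("model returned a malformed outline")
except RuntimeError:
    # Mirrors the handle_exc() above: persist the traceback, then point the
    # user at the log file rather than dumping the stack trace in chat.
    print(f"Error details saved to ./{write_error_log()}")
```

Each failure gets its own file under `jupyter-ai-logs/`, so the chat reply stays short while the full stack trace survives for bug reports.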
packages/jupyter-ai/jupyter_ai/chat_handlers/help.py (1 addition & 1 deletion)
@@ -32,5 +32,5 @@ class HelpChatHandler(BaseChatHandler):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)

-    async def _process_message(self, message: HumanChatMessage):
+    async def process_message(self, message: HumanChatMessage):
         self.reply(HELP_MESSAGE, message)
packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py (1 addition & 1 deletion)
@@ -76,7 +76,7 @@ def _load(self):
         except Exception as e:
             self.log.error("Could not load vector index from disk.")

-    async def _process_message(self, message: HumanChatMessage):
+    async def process_message(self, message: HumanChatMessage):
         # If no embedding provider has been selected
         em_provider_cls, em_provider_args = self.get_embedding_provider()
         if not em_provider_cls:
packages/jupyter-ai/jupyter_ai/handlers.py (2 additions & 2 deletions)
@@ -222,9 +222,9 @@ async def _route(self, message):

         start = time.time()
         if is_command:
-            await self.chat_handlers[command].process_message(message)
+            await self.chat_handlers[command].on_message(message)
         else:
-            await default.process_message(message)
+            await default.on_message(message)

         latency_ms = round((time.time() - start) * 1000)
         command_readable = "Default" if command == "default" else command