Skip to content

Commit

Permalink
[pre-commit.ci] auto fixes from pre-commit.com hooks
Browse files Browse the repository at this point in the history
for more information, see https://pre-commit.ci
  • Loading branch information
pre-commit-ci[bot] authored and michaelchia committed Jun 13, 2024
1 parent cb6d5b5 commit 40e38e6
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 4 deletions.
6 changes: 3 additions & 3 deletions packages/jupyter-ai/jupyter_ai/chat_handlers/base.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import argparse
import contextlib
import os
import time
import traceback
import contextlib
from typing import (
TYPE_CHECKING,
Awaitable,
Expand All @@ -21,9 +21,9 @@
from jupyter_ai.models import (
AgentChatMessage,
ChatMessage,
ClosePendingMessage,
HumanChatMessage,
PendingMessage,
ClosePendingMessage,
)
from jupyter_ai_magics import Persona
from jupyter_ai_magics.providers import BaseProvider
Expand Down Expand Up @@ -199,7 +199,7 @@ def reply(self, response: str, human_msg: Optional[HumanChatMessage] = None):

handler.broadcast_message(agent_msg)
break

def start_pending(self, text: str, ellipsis: bool = True) -> str:
"""
Sends a pending message to the client.
Expand Down
4 changes: 3 additions & 1 deletion packages/jupyter-ai/jupyter_ai/chat_handlers/default.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,5 +48,7 @@ def create_llm_chain(
async def process_message(self, message: HumanChatMessage):
    """
    Handle an incoming human chat message.

    Builds (or refreshes) the LLM chain, awaits the model's completion for
    the message body, and sends the result back as a reply. While awaiting,
    a pending indicator is shown via `self.pending(...)` — presumably this
    surfaces PENDING_MESSAGE to the client until the response arrives.

    Args:
        message: the human chat message to respond to; only `message.body`
            is forwarded to the model.
    """
    self.get_llm_chain()
    with self.pending(PENDING_MESSAGE):
        # The stop sequence keeps the model from generating the next
        # human turn itself.
        response = await self.llm_chain.apredict(
            input=message.body, stop=["\nHuman:"]
        )
    self.reply(response, message)

0 comments on commit 40e38e6

Please sign in to comment.