From 67191c165e6e18f829f5ff0188edf1af4232a084 Mon Sep 17 00:00:00 2001 From: michael Date: Mon, 12 Aug 2024 21:33:54 +0800 Subject: [PATCH 01/22] add ui components --- .../jupyter_ai/chat_handlers/clear.py | 1 - packages/jupyter-ai/jupyter_ai/handlers.py | 35 ++++++++++++++++++- packages/jupyter-ai/jupyter_ai/models.py | 12 +++++++ packages/jupyter-ai/src/chat_handler.ts | 17 +++++++-- .../src/components/chat-messages.tsx | 13 +++++++ .../chat-messages/chat-message-delete.tsx | 29 +++++++++++++++ packages/jupyter-ai/src/components/chat.tsx | 28 +++++++++++---- .../src/components/pending-messages.tsx | 3 ++ packages/jupyter-ai/src/handler.ts | 8 +++++ 9 files changed, 136 insertions(+), 10 deletions(-) create mode 100644 packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py index 97cae4ab4..07e386d45 100644 --- a/packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py +++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py @@ -26,7 +26,6 @@ async def process_message(self, _): # Clear chat handler.broadcast_message(ClearMessage()) - self._chat_history.clear() # Build /help message and reinstate it in chat chat_handlers = handler.chat_handlers diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index a08a87df9..8f5600ebd 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@ -24,6 +24,8 @@ ChatMessage, ChatRequest, ChatUser, + ClearMessage, + ClearRequest, ClosePendingMessage, ConnectionMessage, HumanChatMessage, @@ -236,17 +238,31 @@ def broadcast_message(self, message: Message): self.pending_messages = list( filter(lambda m: m.id != message.id, self.pending_messages) ) + elif isinstance(message, ClearMessage): + if message.at: + self._clear_chat_history_at(message.at) + else: + self.chat_history.clear() + self.pending_messages.clear() async def on_message(self, message): self.log.debug("Message received: %s", message) try: message = json.loads(message) - chat_request = ChatRequest(**message) + if message.get("type") == "clear": + request = ClearRequest(**message) + else: + request = ChatRequest(**message) except ValidationError as e: self.log.error(e) return + if isinstance(request, ClearRequest): + self.broadcast_message(ClearMessage(at=request.at)) + return + + chat_request = request message_body = chat_request.prompt if chat_request.selection: message_body += f"\n\n```\n{chat_request.selection.source}\n```\n" @@ -292,6 +308,23 @@ async def _route(self, message): command_readable = "Default" if command == "default" else command self.log.info(f"{command_readable} chat handler resolved in {latency_ms} ms.") + def _clear_chat_history_at(self, msg_id: str): + """Clears the chat history at a specific message ID.""" + target_msg = None + for msg in self.chat_history: + if msg.id == msg_id: + target_msg = msg + + if msg is not None: + self.chat_history[:] = [ + msg for msg in self.chat_history + if msg.time < target_msg.time + ] + self.pending_messages[:] = [ + msg for msg in self.pending_messages + if msg.time < target_msg.time + ] + def on_close(self): self.log.debug("Disconnecting client with user %s", self.client_id) diff --git a/packages/jupyter-ai/jupyter_ai/models.py b/packages/jupyter-ai/jupyter_ai/models.py index f2fa098bb..6e330ba3c 100644 --- a/packages/jupyter-ai/jupyter_ai/models.py +++ b/packages/jupyter-ai/jupyter_ai/models.py @@ -38,6 
+38,13 @@ class ChatRequest(BaseModel): prompt: str selection: Optional[Selection] +class ClearRequest(BaseModel): + type: Literal["clear"] + at: Optional[str] + """ + Message ID of the ChatMessage to clear at and all messages after. + If empty strig, clears all. + """ class ChatUser(BaseModel): # User ID assigned by IdentityProvider. @@ -105,6 +112,11 @@ class HumanChatMessage(BaseModel): class ClearMessage(BaseModel): type: Literal["clear"] = "clear" + at: Optional[str] = None + """ + Message ID of the ChatMessage to clear at and all messages after. + If not provided, clears all. + """ class PendingMessage(BaseModel): diff --git a/packages/jupyter-ai/src/chat_handler.ts b/packages/jupyter-ai/src/chat_handler.ts index f1b131dcf..10fea41e0 100644 --- a/packages/jupyter-ai/src/chat_handler.ts +++ b/packages/jupyter-ai/src/chat_handler.ts @@ -39,7 +39,7 @@ export class ChatHandler implements IDisposable { * Sends a message across the WebSocket. Promise resolves to the message ID * when the server sends the same message back, acknowledging receipt. */ - public sendMessage(message: AiService.ChatRequest): Promise { + public sendMessage(message: AiService.Request): Promise { return new Promise(resolve => { this._socket?.send(JSON.stringify(message)); this._sendResolverQueue.push(resolve); @@ -132,7 +132,20 @@ export class ChatHandler implements IDisposable { case 'connection': break; case 'clear': - this._messages = []; + if (newMessage.at) { + const target_msg = this._messages.find(m => m.id === newMessage.at); + if (target_msg) { + this._messages = this._messages.filter( + msg => msg.time < target_msg.time + ); + this._pendingMessages = this._pendingMessages.filter( + msg => msg.time < target_msg.time + ); + } + } else { + this._messages = []; + this._pendingMessages = []; + } break; case 'pending': this._pendingMessages = [...this._pendingMessages, newMessage]; diff --git a/packages/jupyter-ai/src/components/chat-messages.tsx b/packages/jupyter-ai/src/components/chat-messages.tsx index ec2e0cf1a..8a70e5a68 100644 --- a/packages/jupyter-ai/src/components/chat-messages.tsx +++ b/packages/jupyter-ai/src/components/chat-messages.tsx @@ -10,14 +10,18 @@ import { AiService } from '../handler'; import { RendermimeMarkdown } from './rendermime-markdown'; import { useCollaboratorsContext } from '../contexts/collaborators-context'; import { ChatMessageMenu } from './chat-messages/chat-message-menu'; +import { ChatMessageDelete } from './chat-messages/chat-message-delete'; +import { ChatHandler } from '../chat_handler'; type ChatMessagesProps = { rmRegistry: IRenderMimeRegistry; messages: AiService.ChatMessage[]; + chatHandler: ChatHandler; }; type ChatMessageHeaderProps = { message: AiService.ChatMessage; + chatHandler: ChatHandler; timestamp: string; sx?: SxProps; }; @@ -111,6 +115,7 @@ export function ChatMessageHeader(props: ChatMessageHeaderProps): JSX.Element { const shouldShowMenu = props.message.type === 'agent' || (props.message.type === 'agent-stream' && props.message.complete); + const shouldShowDelete = props.message.type === 'human'; return ( )} + {shouldShowDelete && ( + + )} @@ -206,6 +218,7 @@ export function ChatMessages(props: ChatMessagesProps): JSX.Element { props.chatHandler.sendMessage(request)} + sx={props.sx} + > + + + ); +} + +export default ChatMessageDelete; diff --git a/packages/jupyter-ai/src/components/chat.tsx b/packages/jupyter-ai/src/components/chat.tsx index 09edfef5a..999d7b02b 100644 --- a/packages/jupyter-ai/src/components/chat.tsx +++ 
b/packages/jupyter-ai/src/components/chat.tsx @@ -1,8 +1,9 @@ import React, { useState, useEffect } from 'react'; import { Box } from '@mui/system'; -import { Button, IconButton, Stack } from '@mui/material'; +import { Button, IconButton, Stack, Tooltip } from '@mui/material'; import SettingsIcon from '@mui/icons-material/Settings'; import ArrowBackIcon from '@mui/icons-material/ArrowBack'; +import AddIcon from '@mui/icons-material/Add'; import type { Awareness } from 'y-protocols/awareness'; import type { IThemeManager } from '@jupyterlab/apputils'; import { IRenderMimeRegistry } from '@jupyterlab/rendermime'; @@ -139,8 +140,12 @@ function ChatBody({ return ( <> - - + + )} {view === ChatView.Chat ? ( - setView(ChatView.Settings)}> - - + + + + props.chatHandler.sendMessage({ type: 'clear' }) + } + > + + + + setView(ChatView.Settings)}> + + + ) : ( )} diff --git a/packages/jupyter-ai/src/components/pending-messages.tsx b/packages/jupyter-ai/src/components/pending-messages.tsx index e11016955..3635e41e0 100644 --- a/packages/jupyter-ai/src/components/pending-messages.tsx +++ b/packages/jupyter-ai/src/components/pending-messages.tsx @@ -3,9 +3,11 @@ import React, { useState, useEffect } from 'react'; import { Box, Typography } from '@mui/material'; import { AiService } from '../handler'; import { ChatMessageHeader } from './chat-messages'; +import { ChatHandler } from '../chat_handler'; type PendingMessagesProps = { messages: AiService.PendingMessage[]; + chatHandler: ChatHandler; }; type PendingMessageElementProps = { @@ -85,6 +87,7 @@ export function PendingMessages( > Date: Tue, 13 Aug 2024 00:28:56 +0800 Subject: [PATCH 02/22] temp add help message to new chat --- packages/jupyter-ai/jupyter_ai/handlers.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index 8f5600ebd..796c5866f 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@ -244,6 +244,10 @@ def broadcast_message(self, message: Message): else: self.chat_history.clear() self.pending_messages.clear() + # TODO: replace once help/welcome message is part of base chat handler + self.loop.create_task( + self.settings["jai_chat_handlers"]["/help"].process_message(None) + ) async def on_message(self, message): self.log.debug("Message received: %s", message) From b80de742d47771b06a5d54eb1b6506ae9591eb79 Mon Sep 17 00:00:00 2001 From: michael Date: Tue, 13 Aug 2024 20:34:13 +0800 Subject: [PATCH 03/22] at to target --- packages/jupyter-ai/jupyter_ai/handlers.py | 6 +++--- packages/jupyter-ai/jupyter_ai/models.py | 4 ++-- packages/jupyter-ai/src/chat_handler.ts | 10 +++++----- .../components/chat-messages/chat-message-delete.tsx | 2 +- packages/jupyter-ai/src/handler.ts | 4 ++-- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index 796c5866f..534d2cd92 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@ -239,8 +239,8 @@ def broadcast_message(self, message: Message): filter(lambda m: m.id != message.id, self.pending_messages) ) elif isinstance(message, ClearMessage): - if message.at: - self._clear_chat_history_at(message.at) + if message.target: + self._clear_chat_history_at(message.target) else: self.chat_history.clear() self.pending_messages.clear() @@ -263,7 +263,7 @@ async def on_message(self, message): return if isinstance(request, 
ClearRequest): - self.broadcast_message(ClearMessage(at=request.at)) + self.broadcast_message(ClearMessage(target=request.target)) return chat_request = request diff --git a/packages/jupyter-ai/jupyter_ai/models.py b/packages/jupyter-ai/jupyter_ai/models.py index 6e330ba3c..8fa8cfe5f 100644 --- a/packages/jupyter-ai/jupyter_ai/models.py +++ b/packages/jupyter-ai/jupyter_ai/models.py @@ -40,7 +40,7 @@ class ChatRequest(BaseModel): class ClearRequest(BaseModel): type: Literal["clear"] - at: Optional[str] + target: Optional[str] """ Message ID of the ChatMessage to clear at and all messages after. If empty strig, clears all. @@ -112,7 +112,7 @@ class HumanChatMessage(BaseModel): class ClearMessage(BaseModel): type: Literal["clear"] = "clear" - at: Optional[str] = None + target: Optional[str] = None """ Message ID of the ChatMessage to clear at and all messages after. If not provided, clears all. diff --git a/packages/jupyter-ai/src/chat_handler.ts b/packages/jupyter-ai/src/chat_handler.ts index 10fea41e0..daf98dfa1 100644 --- a/packages/jupyter-ai/src/chat_handler.ts +++ b/packages/jupyter-ai/src/chat_handler.ts @@ -132,14 +132,14 @@ export class ChatHandler implements IDisposable { case 'connection': break; case 'clear': - if (newMessage.at) { - const target_msg = this._messages.find(m => m.id === newMessage.at); - if (target_msg) { + if (newMessage.target) { + const targetMsg = this._messages.find(m => m.id === newMessage.target); + if (targetMsg) { this._messages = this._messages.filter( - msg => msg.time < target_msg.time + msg => msg.time < targetMsg.time ); this._pendingMessages = this._pendingMessages.filter( - msg => msg.time < target_msg.time + msg => msg.time < targetMsg.time ); } } else { diff --git a/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx b/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx index 2b5728279..e0708b9f1 100644 --- a/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx +++ b/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx @@ -14,7 +14,7 @@ type DeleteButtonProps = { export function ChatMessageDelete(props: DeleteButtonProps): JSX.Element { const request: AiService.ClearRequest = { type: 'clear', - at: props.message.id + target: props.message.id }; return ( Date: Tue, 13 Aug 2024 12:37:18 +0000 Subject: [PATCH 04/22] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- packages/jupyter-ai/jupyter_ai/handlers.py | 6 ++---- packages/jupyter-ai/jupyter_ai/models.py | 6 ++++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index 534d2cd92..b83948106 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@ -321,12 +321,10 @@ def _clear_chat_history_at(self, msg_id: str): if msg is not None: self.chat_history[:] = [ - msg for msg in self.chat_history - if msg.time < target_msg.time + msg for msg in self.chat_history if msg.time < target_msg.time ] self.pending_messages[:] = [ - msg for msg in self.pending_messages - if msg.time < target_msg.time + msg for msg in self.pending_messages if msg.time < target_msg.time ] def on_close(self): diff --git a/packages/jupyter-ai/jupyter_ai/models.py b/packages/jupyter-ai/jupyter_ai/models.py index 8fa8cfe5f..3d3ba9a6e 100644 --- a/packages/jupyter-ai/jupyter_ai/models.py +++ b/packages/jupyter-ai/jupyter_ai/models.py @@ -38,14 +38,16 
@@ class ChatRequest(BaseModel): prompt: str selection: Optional[Selection] + class ClearRequest(BaseModel): type: Literal["clear"] target: Optional[str] """ - Message ID of the ChatMessage to clear at and all messages after. + Message ID of the ChatMessage to clear at and all messages after. If empty strig, clears all. """ + class ChatUser(BaseModel): # User ID assigned by IdentityProvider. username: str @@ -114,7 +116,7 @@ class ClearMessage(BaseModel): type: Literal["clear"] = "clear" target: Optional[str] = None """ - Message ID of the ChatMessage to clear at and all messages after. + Message ID of the ChatMessage to clear at and all messages after. If not provided, clears all. """ From 33b6237cfb1f36149a62375e423d378a162d9904 Mon Sep 17 00:00:00 2001 From: michael Date: Sat, 17 Aug 2024 10:04:26 +0800 Subject: [PATCH 05/22] broadcast ClearMessage sends help message --- packages/jupyter-ai/jupyter_ai/handlers.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index 91563f0bb..977f1ce8a 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@ -254,10 +254,7 @@ def broadcast_message(self, message: Message): else: self.chat_history.clear() self.pending_messages.clear() - # TODO: replace once help/welcome message is part of base chat handler - self.loop.create_task( - self.settings["jai_chat_handlers"]["/help"].process_message(None) - ) + self.settings["jai_chat_handlers"]["default"].send_help_message() async def on_message(self, message): self.log.debug("Message received: %s", message) From d866e52f505d4cef32ae3189b2794ecadafd523e Mon Sep 17 00:00:00 2001 From: michael Date: Wed, 28 Aug 2024 01:00:09 +0800 Subject: [PATCH 06/22] clear llm_chat_memory --- packages/jupyter-ai/jupyter_ai/handlers.py | 6 ++++++ packages/jupyter-ai/jupyter_ai/history.py | 25 ++++++++++++++++++++-- packages/jupyter-ai/src/chat_handler.ts | 4 +++- 3 files changed, 32 insertions(+), 3 deletions(-) diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index 977f1ce8a..087e82cab 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@ -100,6 +100,10 @@ def chat_history(self) -> List[ChatMessage]: def chat_history(self, new_history): self.settings["chat_history"] = new_history + @property + def llm_chat_memory(self) -> List[ChatMessage]: + return self.settings["llm_chat_memory"] + @property def loop(self) -> AbstractEventLoop: return self.settings["jai_event_loop"] @@ -254,6 +258,7 @@ def broadcast_message(self, message: Message): else: self.chat_history.clear() self.pending_messages.clear() + self.llm_chat_memory.clear() self.settings["jai_chat_handlers"]["default"].send_help_message() async def on_message(self, message): @@ -333,6 +338,7 @@ def _clear_chat_history_at(self, msg_id: str): self.pending_messages[:] = [ msg for msg in self.pending_messages if msg.time < target_msg.time ] + self.llm_chat_memory.clear(target_msg.time) def on_close(self): self.log.debug("Disconnecting client with user %s", self.client_id) diff --git a/packages/jupyter-ai/jupyter_ai/history.py b/packages/jupyter-ai/jupyter_ai/history.py index 8216fbcaf..c3c95b798 100644 --- a/packages/jupyter-ai/jupyter_ai/history.py +++ b/packages/jupyter-ai/jupyter_ai/history.py @@ -8,6 +8,9 @@ from .models import HumanChatMessage +MESSAGE_TIME_KEY = "_jupyter_ai_msg_time" + + class 
BoundedChatHistory(BaseChatMessageHistory, BaseModel): """ An in-memory implementation of `BaseChatMessageHistory` that stores up to @@ -30,14 +33,31 @@ async def aget_messages(self) -> List[BaseMessage]: def add_message(self, message: BaseMessage) -> None: """Add a self-created message to the store""" + # Adds a timestamp to the message as a fallback if the message was not + # added using WrappedBoundedChatHistory. + # In such a case, it is possible that this message may be cleared even if + # the target clear message is after this one. + # This will occur if the current time is greater than the last_human_msg time of + # a future message that was added using WrappedBoundedChatHistory. + message.additional_kwargs[MESSAGE_TIME_KEY] = message.additional_kwargs.get( + MESSAGE_TIME_KEY, time.time() + ) self._all_messages.append(message) async def aadd_messages(self, messages: Sequence[BaseMessage]) -> None: """Add messages to the store""" self.add_messages(messages) - def clear(self) -> None: - self._all_messages = [] + def clear(self, after: float = 0.0) -> None: + """Clear all messages after the given time""" + if after: + self._all_messages = [ + m + for m in self._all_messages + if m.additional_kwargs[MESSAGE_TIME_KEY] < after + ] + else: + self._all_messages = [] self.clear_time = time.time() async def aclear(self) -> None: @@ -75,6 +95,7 @@ def messages(self) -> List[BaseMessage]: def add_message(self, message: BaseMessage) -> None: """Prevent adding messages to the store if clear was triggered.""" if self.last_human_msg.time > self.history.clear_time: + message.additional_kwargs[MESSAGE_TIME_KEY] = self.last_human_msg.time self.history.add_message(message) async def aadd_messages(self, messages: Sequence[BaseMessage]) -> None: diff --git a/packages/jupyter-ai/src/chat_handler.ts b/packages/jupyter-ai/src/chat_handler.ts index daf98dfa1..65c9fc085 100644 --- a/packages/jupyter-ai/src/chat_handler.ts +++ b/packages/jupyter-ai/src/chat_handler.ts @@ -133,7 +133,9 @@ export class ChatHandler implements IDisposable { break; case 'clear': if (newMessage.target) { - const targetMsg = this._messages.find(m => m.id === newMessage.target); + const targetMsg = this._messages.find( + m => m.id === newMessage.target + ); if (targetMsg) { this._messages = this._messages.filter( msg => msg.time < targetMsg.time ); this._pendingMessages = this._pendingMessages.filter( msg => msg.time < targetMsg.time ); } } else { From c20d1afa99299ad2722ca316e3001ba95568ed3b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 27 Aug 2024 17:01:13 +0000 Subject: [PATCH 07/22] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- packages/jupyter-ai/jupyter_ai/history.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/jupyter-ai/jupyter_ai/history.py b/packages/jupyter-ai/jupyter_ai/history.py index c3c95b798..cea4e6c2a 100644 --- a/packages/jupyter-ai/jupyter_ai/history.py +++ b/packages/jupyter-ai/jupyter_ai/history.py @@ -7,7 +7,6 @@ from .models import HumanChatMessage - MESSAGE_TIME_KEY = "_jupyter_ai_msg_time" From d13345eec6ac94874fa301e0a50199f07f3a5bfa Mon Sep 17 00:00:00 2001 From: michael Date: Wed, 28 Aug 2024 01:05:45 +0800 Subject: [PATCH 08/22] typo --- packages/jupyter-ai/jupyter_ai/handlers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index 087e82cab..0fae5d77e 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@
-331,7 +331,7 @@ def _clear_chat_history_at(self, msg_id: str): if msg.id == msg_id: target_msg = msg - if msg is not None: + if target_msg is not None: self.chat_history[:] = [ msg for msg in self.chat_history if msg.time < target_msg.time ] From 425cd34a394db4fb87ab0a025347e65f49180506 Mon Sep 17 00:00:00 2001 From: Sanjiv Das Date: Wed, 28 Aug 2024 12:22:39 -0700 Subject: [PATCH 09/22] Update chat welcome message --- .../chat-welcome-message-linux.png | Bin 16735 -> 16878 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/packages/jupyter-ai/ui-tests/tests/jupyter-ai.spec.ts-snapshots/chat-welcome-message-linux.png b/packages/jupyter-ai/ui-tests/tests/jupyter-ai.spec.ts-snapshots/chat-welcome-message-linux.png index c6f10885ba366764bf9ee92cabd5a6905d6b1ff9..678514cbab901d2593dcb3cce13f8ca42f6f064e 100644 GIT binary patch [literal 16878: binary PNG snapshot data omitted]
From d6d7273daef61f80f7e4567833597403baaaeace Mon Sep 17 00:00:00 2001 From: michaelchia Date: Fri, 30 Aug 2024 10:40:25 +0800 Subject: [PATCH 10/22] improve docstring Co-authored-by: david qiu --- packages/jupyter-ai/jupyter_ai/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/jupyter-ai/jupyter_ai/models.py b/packages/jupyter-ai/jupyter_ai/models.py index 3d3ba9a6e..e465076a2 100644 --- a/packages/jupyter-ai/jupyter_ai/models.py +++ b/packages/jupyter-ai/jupyter_ai/models.py @@ -44,7 +44,7 @@ class ClearRequest(BaseModel): target: Optional[str] """ Message ID of the ChatMessage to clear at and all messages after. - If empty strig, clears all. + If not provided, this requests the backend to clear all messages. """ @@ -117,7 +117,7 @@ class ClearMessage(BaseModel): type: Literal["clear"] = "clear" target: Optional[str] = None """ Message ID of the ChatMessage to clear at and all messages after. - If not provided, clears all. + If not provided, this instructs the frontend to clear all messages.
""" From ada86bf96f3fed98c3e65d36a459ffd114b56972 Mon Sep 17 00:00:00 2001 From: michael Date: Fri, 30 Aug 2024 20:36:07 +0800 Subject: [PATCH 11/22] type typo --- packages/jupyter-ai/jupyter_ai/handlers.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index 0fae5d77e..ab70308b4 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@ -41,6 +41,7 @@ if TYPE_CHECKING: from jupyter_ai_magics.embedding_providers import BaseEmbeddingsProvider from jupyter_ai_magics.providers import BaseProvider + from .history import BoundChatHistory class ChatHistoryHandler(BaseAPIHandler): @@ -101,7 +102,7 @@ def chat_history(self, new_history): self.settings["chat_history"] = new_history @property - def llm_chat_memory(self) -> List[ChatMessage]: + def llm_chat_memory(self) -> "BoundChatHistory": return self.settings["llm_chat_memory"] @property From 341b1096740c0152ffa398360c20b4b6b40e8b5b Mon Sep 17 00:00:00 2001 From: michael Date: Fri, 30 Aug 2024 21:32:25 +0800 Subject: [PATCH 12/22] use tooltippedbutton + do not show new chat button on welcome --- .../chat-messages/chat-message-delete.tsx | 8 ++-- packages/jupyter-ai/src/components/chat.tsx | 38 +++++++++++-------- 2 files changed, 28 insertions(+), 18 deletions(-) diff --git a/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx b/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx index e0708b9f1..7769413e8 100644 --- a/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx +++ b/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx @@ -1,9 +1,10 @@ import React from 'react'; -import { IconButton, SxProps } from '@mui/material'; +import { SxProps } from '@mui/material'; import { Close } from '@mui/icons-material'; import { AiService } from '../../handler'; import { ChatHandler } from '../../chat_handler'; +import { TooltippedIconButton } from '../mui-extras/tooltipped-icon-button'; type DeleteButtonProps = { message: AiService.ChatMessage; @@ -17,12 +18,13 @@ export function ChatMessageDelete(props: DeleteButtonProps): JSX.Element { target: props.message.id }; return ( - props.chatHandler.sendMessage(request)} sx={props.sx} + tooltip="Delete this and all future messages" > - + ); } diff --git a/packages/jupyter-ai/src/components/chat.tsx b/packages/jupyter-ai/src/components/chat.tsx index d1be98bc9..dbff8b65f 100644 --- a/packages/jupyter-ai/src/components/chat.tsx +++ b/packages/jupyter-ai/src/components/chat.tsx @@ -1,6 +1,6 @@ import React, { useState, useEffect } from 'react'; import { Box } from '@mui/system'; -import { Button, IconButton, Stack, Tooltip } from '@mui/material'; +import { Button, IconButton, Stack } from '@mui/material'; import SettingsIcon from '@mui/icons-material/Settings'; import ArrowBackIcon from '@mui/icons-material/ArrowBack'; import AddIcon from '@mui/icons-material/Add'; @@ -25,10 +25,13 @@ import { ActiveCellManager } from '../contexts/active-cell-context'; import { ScrollContainer } from './scroll-container'; +import { TooltippedIconButton } from './mui-extras/tooltipped-icon-button'; type ChatBodyProps = { chatHandler: ChatHandler; - setChatView: (view: ChatView) => void; + openSettingsView: () => void; + showWelcomeMessage: boolean; + setShowWelcomeMessage: (show: boolean) => void; rmRegistry: IRenderMimeRegistry; focusInputSignal: ISignal; messageFooter: IJaiMessageFooter | null; 
@@ -52,7 +55,9 @@ function getPersonaName(messages: AiService.ChatMessage[]): string { function ChatBody({ chatHandler, focusInputSignal, - setChatView: chatViewHandler, + openSettingsView, + showWelcomeMessage, + setShowWelcomeMessage, rmRegistry: renderMimeRegistry, messageFooter }: ChatBodyProps): JSX.Element { @@ -65,7 +70,6 @@ function ChatBody({ const [personaName, setPersonaName] = useState( getPersonaName(messages) ); - const [showWelcomeMessage, setShowWelcomeMessage] = useState(false); const [sendWithShiftEnter, setSendWithShiftEnter] = useState(true); /** @@ -104,11 +108,6 @@ function ChatBody({ }; }, [chatHandler]); - const openSettingsView = () => { - setShowWelcomeMessage(false); - chatViewHandler(ChatView.Settings); - }; - if (showWelcomeMessage) { return ( (props.chatView || ChatView.Chat); + const [showWelcomeMessage, setShowWelcomeMessage] = useState(false); + + const openSettingsView = () => { + setShowWelcomeMessage(false); + setView(ChatView.Settings); + }; return ( @@ -219,16 +224,17 @@ export function Chat(props: ChatProps): JSX.Element { )} {view === ChatView.Chat ? ( - - props.chatHandler.sendMessage({ type: 'clear' }) } + tooltip="New chat" > - - - setView(ChatView.Settings)}> + + )} + openSettingsView()}> @@ -240,7 +246,9 @@ export function Chat(props: ChatProps): JSX.Element { {view === ChatView.Chat && ( Date: Fri, 30 Aug 2024 13:33:31 +0000 Subject: [PATCH 13/22] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- packages/jupyter-ai/jupyter_ai/handlers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index ab70308b4..439193328 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@ -41,6 +41,7 @@ if TYPE_CHECKING: from jupyter_ai_magics.embedding_providers import BaseEmbeddingsProvider from jupyter_ai_magics.providers import BaseProvider + from .history import BoundChatHistory From 466346624eb31df3bdc04bbf220310c5efdaadad Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 30 Aug 2024 13:38:55 +0000 Subject: [PATCH 14/22] Update Playwright Snapshots --- .../chat-welcome-message-linux.png | Bin 16878 -> 10960 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/packages/jupyter-ai/ui-tests/tests/jupyter-ai.spec.ts-snapshots/chat-welcome-message-linux.png b/packages/jupyter-ai/ui-tests/tests/jupyter-ai.spec.ts-snapshots/chat-welcome-message-linux.png index 678514cbab901d2593dcb3cce13f8ca42f6f064e..f921708fcdae4231348c4d98056946dfefef72db 100644 GIT binary patch [literal 10960: binary PNG snapshot data omitted]
zO-`aF*GUvYDETEpWRGCedINNrWdS2}ej#%6+LNGV|Ts^_m-3BHp4 z^d*$z@*gE(yNdF146aq`n1~4MXV0GbUeh{-&4`rx=H`oC_qOp7rV4umGm-jU5>8Hc zL2y$sshQ7qHby@K9n9X(&+iV?3DWJMVrUW1pFIxLE`O*YaHpR+93!;(WIN%UZGQ_{ z$+}J$&0?$|C&%kTfNK0z0ksK>VCd>gJJd8Zwx>sq;K8EqoS(j2KL+d4aj?*o&}zKu z66k7RRfW4-6jJb-7&N~pAs2CC1ECRVeZUvhkIN(9tGWxQfj0?w8ySbN8zIxmz3T!d zRCvuaDzVQzy!}~wY+8Y}l(93{(U4ZQx<1!*lxH*2oZ5`N!3KEm| zsnj)LJ&MlQDdAQ_L#5kO-*f3U&}4n+b{gCj*j=JxD7ttvJh)e_gYJhrej~l9uTyN9b|8(=A;nsAspyTp&KcYK#?$p5- z!t5TYgo2dVi>y}sHnRcKWU#rwP0MZr1(`zL&4crb3aq^JtUQ;u0*CP~ftnY+j7?f|+L z(t$$6X*D0MwF#cT0l*h1S57`k0zhj73fU1RnPEcU@Bd-V+FLa zk1;WLlUB^DVDMV*+)Cz76&^4FF%g_SG|n018-g^$MXEtOk_0;}n`r{S< z#W79o*{cVs74aR?m69i?r{6%C+&Kpg*B>Ycg4alLV})Qxr~q~o5nUE?Sqg4*6MTqp zk(!L$R02xAIv5KDW<3nBY(od7Eg>%-T+as{B0e-lgp`6q2Q9Y+Qm}J9)+*C}4P-|Q zSYHDN%MjdHSoygO0;ul#4x*$X6B7J>u=Eh=f*br2l(Y5gX|O469=ULCYVX?tV5|pP z3u**mr>;!;Jb*ER`|f*e+q@QzS$jqf3`ZlC8#T{vZqJ3=N?)7I75Q}GAVlGo&oRs5 z(MtQqzWd9E4<2ZxT=M7t)AR#TQwvSIYSOm9XO`!^@e+Ig?p-|~Hw!oJx0!yh8Y^Dx ztdb)ihz6byJ4%=|B2G;JRq$n{5LeRA-@hJ~2VhIqWg*@Mp4k{Izyjx1Jx@Q4CEZ*tm|sM7 znY9G~W;4VF0$eT zn^g3$=0XokyiI@S+6gk}K*qfd;yE&t{u!ZFE(PGj5`&cjTT&;dr{OP2Nl4xSEz@$% z&G_8@Tb^A`O{BV|wKWJC0uVT4sw3m62mGF``B-a~(Bq>cgh?95Kr&;id8%&1dLTkq zc3Y1Ge&`v?Ny(M`90QIEj>czzlT2T|&s^s=-v8@vRrI>$YCeGkNPwor!ovE;`fV-N z&OvTU-(w3n>05wR@0}++H9+jAlpl!u*G;{IW!`I$jOYpQXG!3`_{o`gc$Aq#Jzv zKpffEt6)+gTLWMdhx^2^yes-1ZO~@?M@&&aBKGT7tEVjHMn=8>=P$uMx=lBvz&coitS8`+WJn+*ROi1>?&U|1%oiY4UGBN@6`H1nB z-wKk^&=2D>hox(Kt3$q^x@#E^wYecou~iA@#G$4{t-#u*}pjg(-9gREOCWVaYG~1Oqu;rlrKw1$q2eEsyva)*m|DoO=>7}^f8?b^0OF{-<4I*1~&=OdS zS6|$(>W|N6U}nAq`>BY8OkkDrU>`@1OG+)Z&m3vSeu06!v-`~b$tvN9Lg)fZP(840 zmaSV^l1dhcuGe*Dxb)99^S>3j|K$R+W_7xMzO!LzLW_@T5~vT{c7S+AH$28(C%ii7a# z>CMY>HSKkC9Z6_~_ixct{pqB)1MfS+7HD2#8Ck)cwWQ@Ogs= zLaviiJuiD&m6{N(o@M;=UFOhS2LmisvCvRzEK|{xjf9C$jDCEZsiCBF6>|7BI({+t zw?IWkZo2df8b)WJ;lDLnw(v!Is#W@`w5~f~$1a{3yd-6cI5%}Q9`90HZqCd;w7JbrSE)D}AjQtFhi|29QWg0oHc^x5 zX*!9^?=(^2yUwpCL`-c(63D1}sv8qEezS&FCg%E5o>>)7ZKzj{ZY_%pPv$JnV>jLw zU4J8_pQ6QAyT`E?)mJJ+YcX@7Rondgc>m;a;gHM9c^Yq;zWtw)(s~P#FYNl!*E*s< z=s#mxb^S&&YVuX`+ulZkJNCVPRPPaAv(e>W>MtpN9+B_Jnkm(i$~ao9uFsJ1;$fi9 z%?zz~wmxBmY;K)f-Prx$T8zm;vN>cXaN_h&nXP4QR2O5SGtR}T1NKrPot5K%EFQZV3$NjB|Rs`QLyI1HO6ZlpmZQ!LqnO_cDN{%k^9}2r{@7KUiJ(edYnxvqPts5_Exw$E~Hu) zg_E}@xRMAQgcG~oU;fGd=ed)EeYQsl$yZ(Xa&-GmUhL}6q9@!~IaI^X55%Z2Q3?@w z3FGA!{s+7{sCXS3)adC)P1ie(Nv@YL?v*cd2CofWAbz8}`AEh6`vh}C64pTe%E6jl zg7d6h?%#7NA?<8I7n;inR?oS9%_j@6o?0HR$SeBRu34)7WH~s#HM8v#!fmY|^CPfu zrF6uFBcI#N*`{Uui!-{T*4S5jaobGSrAx#&w%j?5cVmT5Cu-nQi22A3-bf}c)*y+z zH`e8|&5m>CXrX48>V>%JkA#@gn=bW!z8o z7idh*y^^n%Wss%*?L+kx=J#mVuXR!B`<#+0C;Jr--1{02^g=LD=6z%K6!u5u(;+q9 z3Be9ORFWUZ*G@PwE=*K>qjA6-+@BW1pZ}u!GgJpm=2q)Y66 zvU|=ny+ixbJoG~+O?YkK5r0#kd`R=eG9!uE#r|efLvt5ihga*bzqB24sjuXlJfd0^ zvZdnma3YB@^r1aZ_f?cB;wDeq>1E4^e$>UnBe>#~)}$C9jPZS(xnXCF`QYr8Q+e+l zp~~534~Q^bJPuu7kZoqi@o&9}mL;d?##L#IrNl2(!=idv(tPvDxoGeD-16(rGO-T%cxdumOsu#^|fvSKFRTvt$Fhps9aNvss5i=7&mA% zyLzw}@<_y0*bGdQ8|+C=loL%~35OT&rBVIUNi4ng^P3AI@FZxK$vu)I9d=W(#p2pq zKAUQ@kdg8`{E!gaa8uJKZ`-1(zIG+9tznc&=lL_1y7iDX8yjUDI3ZlBV!ZW`c0Fbs zlr{~^Nd#+7;-kcpc*-p7(f|uj5wjxUxLRlkclGMa&&AHhTu(?F=yHO@OvT)2MZ*}7`^Ih`PsL)Ide$8W3Hdp;7SpzVC=cmOK{!nqA2+vO6V1mUu6RI^X8{^ z?=#Pdnl>zGY-ww%cx*|E@MT&Cb?C5P`P#1jVJX8-wc!osz~|DBzs-x`gD3%ApDp^z zJ@#{iPGgMZLtDb#)-&mNxvcdo!Xw|(nlRlK@P|(v!2;uRBO@c51ZR7jf$GoW$7jNJ zc#eW@LS((pX$I|G0t6p4cJYcasR&ICTfVBEcrX$|Rb~rn9?sY&p(k_Q8OJ(UW$G$T z34UGBulZI_A5A9?-zCHeazaPti+wJ`;TS39&(0`XAGHDne#cI9v*q;EOW0)_hqQv50{u{X`D`s zyr&Lhmwc?4R9mnxi$%%NY>JJm!h$`$Z<&QAO=B4?c*+z6ayP?0>Zyg#p+Jcw(|RRv 
z>vi3dSbm#!G&W)pPzkGN^7R?GMf^!#xkR7XyOh{t_7QeXfhdA8#3C^eDkU8O90oo> zV<{*k^bOc+K7{(fq|~e;oS;H<6+XfTzYyikh9NWNvwJ6q(qP-DNqTj?xl%V7`z)=O z0-yENn}J8MjcDauzi-Aq5l>=ZCRbQ#x1hiIrP>v@m-zRT`m~PyO8QoMTwCH7&l;0R z|KI*>)_?gx Date: Fri, 30 Aug 2024 22:15:51 +0800 Subject: [PATCH 15/22] fix not adding uncleared pending messages to memory --- packages/jupyter-ai/jupyter_ai/history.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/packages/jupyter-ai/jupyter_ai/history.py b/packages/jupyter-ai/jupyter_ai/history.py index cea4e6c2a..806853ce7 100644 --- a/packages/jupyter-ai/jupyter_ai/history.py +++ b/packages/jupyter-ai/jupyter_ai/history.py @@ -21,6 +21,7 @@ class BoundedChatHistory(BaseChatMessageHistory, BaseModel): k: int clear_time: float = 0.0 + clear_after: float = 0.0 _all_messages: List[BaseMessage] = PrivateAttr(default_factory=list) @property @@ -58,6 +59,7 @@ def clear(self, after: float = 0.0) -> None: else: self._all_messages = [] self.clear_time = time.time() + self.clear_after = after async def aclear(self) -> None: self.clear() @@ -92,8 +94,13 @@ def messages(self) -> List[BaseMessage]: return self.history.messages def add_message(self, message: BaseMessage) -> None: - """Prevent adding messages to the store if clear was triggered.""" - if self.last_human_msg.time > self.history.clear_time: + # prevent adding pending messages to the store if clear was triggered. + # if partial clearing, allow adding pending messages that were not cleared. + if ( + self.last_human_msg.time + > self.history.clear_time | self.last_human_msg.time + < self.history.clear_after + ): message.additional_kwargs[MESSAGE_TIME_KEY] = self.last_human_msg.time self.history.add_message(message) From 9bc935f8c36c7ec966d53fde96d22e1f829d51d6 Mon Sep 17 00:00:00 2001 From: michael Date: Wed, 4 Sep 2024 09:39:07 +0800 Subject: [PATCH 16/22] reimplement to delete specific message exchange --- .../jupyter_ai/chat_handlers/ask.py | 2 +- .../jupyter_ai/chat_handlers/base.py | 33 ++++++++++----- .../jupyter_ai/chat_handlers/default.py | 4 +- .../jupyter_ai/chat_handlers/fix.py | 2 +- .../jupyter_ai/chat_handlers/learn.py | 2 +- packages/jupyter-ai/jupyter_ai/handlers.py | 38 ++++++++---------- packages/jupyter-ai/jupyter_ai/history.py | 40 +++++++++---------- packages/jupyter-ai/jupyter_ai/models.py | 1 + packages/jupyter-ai/src/chat_handler.ts | 17 ++++---- .../chat-messages/chat-message-delete.tsx | 2 +- packages/jupyter-ai/src/handler.ts | 1 + 11 files changed, 72 insertions(+), 70 deletions(-) diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py index 79736f2cb..5c3026685 100644 --- a/packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py +++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py @@ -71,7 +71,7 @@ async def process_message(self, message: HumanChatMessage): self.get_llm_chain() try: - with self.pending("Searching learned documents"): + with self.pending("Searching learned documents", message): result = await self.llm_chain.acall({"question": query}) response = result["answer"] self.reply(response, message) diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/base.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/base.py index 516afce6c..b97015518 100644 --- a/packages/jupyter-ai/jupyter_ai/chat_handlers/base.py +++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/base.py @@ -270,7 +270,13 @@ def reply(self, response: str, human_msg: Optional[HumanChatMessage] = None): def persona(self): return 
self.config_manager.persona - def start_pending(self, text: str, ellipsis: bool = True) -> PendingMessage: + def start_pending( + self, + text: str, + human_msg: Optional[HumanChatMessage] = None, + *, + ellipsis: bool = True, + ) -> PendingMessage: """ Sends a pending message to the client. @@ -282,6 +288,7 @@ def start_pending(self, text: str, ellipsis: bool = True) -> PendingMessage: id=uuid4().hex, time=time.time(), body=text, + reply_to=human_msg.id if human_msg else "", persona=Persona(name=persona.name, avatar_route=persona.avatar_route), ellipsis=ellipsis, ) @@ -315,12 +322,18 @@ def close_pending(self, pending_msg: PendingMessage): pending_msg.closed = True @contextlib.contextmanager - def pending(self, text: str, ellipsis: bool = True): + def pending( + self, + text: str, + human_msg: Optional[HumanChatMessage] = None, + *, + ellipsis: bool = True, + ): """ Context manager that sends a pending message to the client, and closes it after the block is executed. """ - pending_msg = self.start_pending(text, ellipsis=ellipsis) + pending_msg = self.start_pending(text, human_msg=human_msg, ellipsis=ellipsis) try: yield pending_msg finally: @@ -378,17 +391,15 @@ def parse_args(self, message, silent=False): return None return args - def get_llm_chat_history( + def get_llm_chat_memory( self, - last_human_msg: Optional[HumanChatMessage] = None, + last_human_msg: HumanChatMessage, **kwargs, ) -> "BaseChatMessageHistory": - if last_human_msg: - return WrappedBoundedChatHistory( - history=self.llm_chat_memory, - last_human_msg=last_human_msg, - ) - return self.llm_chat_memory + return WrappedBoundedChatHistory( + history=self.llm_chat_memory, + last_human_msg=last_human_msg, + ) @property def output_dir(self) -> str: diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py index 02256f4eb..a51ef29e3 100644 --- a/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py +++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/default.py @@ -44,7 +44,7 @@ def create_llm_chain( if not llm.manages_history: runnable = RunnableWithMessageHistory( runnable=runnable, - get_session_history=self.get_llm_chat_history, + get_session_history=self.get_llm_chat_memory, input_messages_key="input", history_messages_key="history", history_factory_config=[ @@ -101,7 +101,7 @@ async def process_message(self, message: HumanChatMessage): received_first_chunk = False # start with a pending message - with self.pending("Generating response") as pending_message: + with self.pending("Generating response", message) as pending_message: # stream response in chunks. this works even if a provider does not # implement streaming, as `astream()` defaults to yielding `_call()` # when `_stream()` is not implemented on the LLM class. diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py index 318f9a5dd..d6ecc6d81 100644 --- a/packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py +++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/fix.py @@ -92,7 +92,7 @@ async def process_message(self, message: HumanChatMessage): extra_instructions = message.prompt[4:].strip() or "None." 
self.get_llm_chain() - with self.pending("Analyzing error"): + with self.pending("Analyzing error", message): response = await self.llm_chain.apredict( extra_instructions=extra_instructions, stop=["\nHuman:"], diff --git a/packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py b/packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py index 8d6fb09aa..29e147f22 100644 --- a/packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py +++ b/packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py @@ -187,7 +187,7 @@ async def process_message(self, message: HumanChatMessage): # delete and relearn index if embedding model was changed await self.delete_and_relearn() - with self.pending(f"Loading and splitting files for {load_path}"): + with self.pending(f"Loading and splitting files for {load_path}", message): try: await self.learn_dir( load_path, args.chunk_size, args.chunk_overlap, args.all_files diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index 439193328..95b112652 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@ -210,14 +210,6 @@ def broadcast_message(self, message: Message): Appends message to chat history. """ - self.log.debug("Broadcasting message: %s to all clients...", message) - client_ids = self.root_chat_handlers.keys() - - for client_id in client_ids: - client = self.root_chat_handlers[client_id] - if client: - client.write_message(message.dict()) - # do not broadcast agent messages that are replying to cleared human message if ( isinstance(message, (AgentChatMessage, AgentStreamMessage)) @@ -228,6 +220,14 @@ def broadcast_message(self, message: Message): ]: return + self.log.debug("Broadcasting message: %s to all clients...", message) + client_ids = self.root_chat_handlers.keys() + + for client_id in client_ids: + client = self.root_chat_handlers[client_id] + if client: + client.write_message(message.dict()) + # append all messages of type `ChatMessage` directly to the chat history if isinstance( message, (HumanChatMessage, AgentChatMessage, AgentStreamMessage) @@ -328,19 +328,15 @@ async def _route(self, message): def _clear_chat_history_at(self, msg_id: str): """Clears the chat history at a specific message ID.""" - target_msg = None - for msg in self.chat_history: - if msg.id == msg_id: - target_msg = msg - - if target_msg is not None: - self.chat_history[:] = [ - msg for msg in self.chat_history if msg.time < target_msg.time - ] - self.pending_messages[:] = [ - msg for msg in self.pending_messages if msg.time < target_msg.time - ] - self.llm_chat_memory.clear(target_msg.time) + self.chat_history[:] = [ + msg + for msg in self.chat_history + if msg.id != msg_id and getattr(msg, "reply_to", None) != msg_id + ] + self.pending_messages[:] = [ + msg for msg in self.pending_messages if msg.reply_to != msg_id + ] + self.llm_chat_memory.clear(msg_id) def on_close(self): self.log.debug("Disconnecting client with user %s", self.client_id) diff --git a/packages/jupyter-ai/jupyter_ai/history.py b/packages/jupyter-ai/jupyter_ai/history.py index 806853ce7..c1ff69a28 100644 --- a/packages/jupyter-ai/jupyter_ai/history.py +++ b/packages/jupyter-ai/jupyter_ai/history.py @@ -1,5 +1,5 @@ import time -from typing import List, Sequence +from typing import List, Sequence, Optional from langchain_core.chat_history import BaseChatMessageHistory from langchain_core.messages import BaseMessage @@ -7,7 +7,7 @@ from .models import HumanChatMessage -MESSAGE_TIME_KEY = "_jupyter_ai_msg_time" 
+HUMAN_MSG_ID_KEY = "_jupyter_ai_human_msg_id" class BoundedChatHistory(BaseChatMessageHistory, BaseModel): @@ -21,7 +21,7 @@ class BoundedChatHistory(BaseChatMessageHistory, BaseModel): k: int clear_time: float = 0.0 - clear_after: float = 0.0 + clear_msg_id: Optional[str] = None _all_messages: List[BaseMessage] = PrivateAttr(default_factory=list) @property @@ -33,33 +33,30 @@ async def aget_messages(self) -> List[BaseMessage]: def add_message(self, message: BaseMessage) -> None: """Add a self-created message to the store""" - # Adds a timestamp to the message as a fallback if message was not - # added not using WrappedBoundedChatHistory. - # In such a case, it possible that this message may be cleared even if - # the target clear message is after this one. - # This will occur if the current time is greater than the last_human_msg time of - # a future message that was added using WrappedBoundedChatHistory. - message.additional_kwargs[MESSAGE_TIME_KEY] = message.additional_kwargs.get( - MESSAGE_TIME_KEY, time.time() - ) + if HUMAN_MSG_ID_KEY not in message.additional_kwargs: + # human message id must be added to allow for targeted clearing of messages. + # `WrappedBoundedChatHistory` should be used instead to add messages. + raise ValueError( + "Message must have a human message ID to be added to the store." + ) self._all_messages.append(message) async def aadd_messages(self, messages: Sequence[BaseMessage]) -> None: """Add messages to the store""" self.add_messages(messages) - def clear(self, after: float = 0.0) -> None: + def clear(self, human_msg_id: Optional[str] = None) -> None: """Clear all messages after the given time""" - if after: + if human_msg_id: self._all_messages = [ m for m in self._all_messages - if m.additional_kwargs[MESSAGE_TIME_KEY] < after + if m.additional_kwargs[HUMAN_MSG_ID_KEY] != human_msg_id ] else: self._all_messages = [] self.clear_time = time.time() - self.clear_after = after + self.clear_msg_id = human_msg_id async def aclear(self) -> None: self.clear() @@ -95,13 +92,12 @@ def messages(self) -> List[BaseMessage]: def add_message(self, message: BaseMessage) -> None: # prevent adding pending messages to the store if clear was triggered. - # if partial clearing, allow adding pending messages that were not cleared. - if ( - self.last_human_msg.time - > self.history.clear_time | self.last_human_msg.time - < self.history.clear_after + # if targeted clearing, allow adding pending messages that were not cleared. 
+ if self.last_human_msg.time > self.history.clear_time or ( + self.history.clear_msg_id + and self.last_human_msg.id != self.history.clear_msg_id ): - message.additional_kwargs[MESSAGE_TIME_KEY] = self.last_human_msg.time + message.additional_kwargs[HUMAN_MSG_ID_KEY] = self.last_human_msg.id self.history.add_message(message) async def aadd_messages(self, messages: Sequence[BaseMessage]) -> None: diff --git a/packages/jupyter-ai/jupyter_ai/models.py b/packages/jupyter-ai/jupyter_ai/models.py index e465076a2..56ee10f02 100644 --- a/packages/jupyter-ai/jupyter_ai/models.py +++ b/packages/jupyter-ai/jupyter_ai/models.py @@ -126,6 +126,7 @@ class PendingMessage(BaseModel): id: str time: float body: str + reply_to: str persona: Persona ellipsis: bool = True closed: bool = False diff --git a/packages/jupyter-ai/src/chat_handler.ts b/packages/jupyter-ai/src/chat_handler.ts index 65c9fc085..c21b442ce 100644 --- a/packages/jupyter-ai/src/chat_handler.ts +++ b/packages/jupyter-ai/src/chat_handler.ts @@ -133,17 +133,14 @@ export class ChatHandler implements IDisposable { break; case 'clear': if (newMessage.target) { - const targetMsg = this._messages.find( - m => m.id === newMessage.target + this._messages = this._messages.filter( + msg => + msg.id != newMessage.target && + !('reply_to' in msg && msg.reply_to == newMessage.target) + ); + this._pendingMessages = this._pendingMessages.filter( + msg => msg.reply_to != newMessage.target ); - if (targetMsg) { - this._messages = this._messages.filter( - msg => msg.time < targetMsg.time - ); - this._pendingMessages = this._pendingMessages.filter( - msg => msg.time < targetMsg.time - ); - } } else { this._messages = []; this._pendingMessages = []; diff --git a/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx b/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx index 7769413e8..d6fc691bd 100644 --- a/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx +++ b/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx @@ -21,7 +21,7 @@ export function ChatMessageDelete(props: DeleteButtonProps): JSX.Element { props.chatHandler.sendMessage(request)} sx={props.sx} - tooltip="Delete this and all future messages" + tooltip="Delete this exchange" > diff --git a/packages/jupyter-ai/src/handler.ts b/packages/jupyter-ai/src/handler.ts index 4cca0968e..caeb04150 100644 --- a/packages/jupyter-ai/src/handler.ts +++ b/packages/jupyter-ai/src/handler.ts @@ -151,6 +151,7 @@ export namespace AiService { id: string; time: number; body: string; + reply_to: string; persona: Persona; ellipsis: boolean; }; From f41afd841ab8d88e7a467686f86141598bbd202f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 01:39:24 +0000 Subject: [PATCH 17/22] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- packages/jupyter-ai/jupyter_ai/history.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/jupyter-ai/jupyter_ai/history.py b/packages/jupyter-ai/jupyter_ai/history.py index c1ff69a28..a70c20cc4 100644 --- a/packages/jupyter-ai/jupyter_ai/history.py +++ b/packages/jupyter-ai/jupyter_ai/history.py @@ -1,5 +1,5 @@ import time -from typing import List, Sequence, Optional +from typing import List, Optional, Sequence from langchain_core.chat_history import BaseChatMessageHistory from langchain_core.messages import BaseMessage From 
5b574e4100e0abca73be2b639f49a0384e3b493e Mon Sep 17 00:00:00 2001 From: michael Date: Wed, 4 Sep 2024 09:44:57 +0800 Subject: [PATCH 18/22] fix lint --- packages/jupyter-ai/src/chat_handler.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/jupyter-ai/src/chat_handler.ts b/packages/jupyter-ai/src/chat_handler.ts index c21b442ce..2ff8b108c 100644 --- a/packages/jupyter-ai/src/chat_handler.ts +++ b/packages/jupyter-ai/src/chat_handler.ts @@ -135,11 +135,11 @@ export class ChatHandler implements IDisposable { if (newMessage.target) { this._messages = this._messages.filter( msg => - msg.id != newMessage.target && - !('reply_to' in msg && msg.reply_to == newMessage.target) + msg.id !== newMessage.target && + !('reply_to' in msg && msg.reply_to === newMessage.target) ); this._pendingMessages = this._pendingMessages.filter( - msg => msg.reply_to != newMessage.target + msg => msg.reply_to !== newMessage.target ); } else { this._messages = []; From be19f1da6c3f4824ec8a2dabb1f2a23d9d163c64 Mon Sep 17 00:00:00 2001 From: michaelchia Date: Fri, 6 Sep 2024 08:59:24 +0800 Subject: [PATCH 19/22] refine docs Co-authored-by: david qiu --- packages/jupyter-ai/jupyter_ai/handlers.py | 4 +++- packages/jupyter-ai/jupyter_ai/models.py | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index 95b112652..cc34369c5 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@ -327,7 +327,9 @@ async def _route(self, message): self.log.info(f"{command_readable} chat handler resolved in {latency_ms} ms.") def _clear_chat_history_at(self, msg_id: str): - """Clears the chat history at a specific message ID.""" + """ + Clears a conversation exchange given a human message ID `msg_id`. + """ self.chat_history[:] = [ msg for msg in self.chat_history diff --git a/packages/jupyter-ai/jupyter_ai/models.py b/packages/jupyter-ai/jupyter_ai/models.py index 56ee10f02..7e663d55d 100644 --- a/packages/jupyter-ai/jupyter_ai/models.py +++ b/packages/jupyter-ai/jupyter_ai/models.py @@ -43,7 +43,7 @@ class ClearRequest(BaseModel): type: Literal["clear"] target: Optional[str] """ - Message ID of the ChatMessage to clear at and all messages after. + Message ID of the HumanChatMessage to delete an exchange at. If not provided, this requests the backend to clear all messages. """ @@ -116,7 +116,7 @@ class ClearMessage(BaseModel): type: Literal["clear"] = "clear" target: Optional[str] = None """ - Message ID of the ChatMessage to clear at and all messages after. + Message ID of the HumanChatMessage to delete an exchange at. If not provided, this instructs the frontend to clear all messages. 
""" From 7b185a1bb76999e6c025d16c93345e72780227fb Mon Sep 17 00:00:00 2001 From: michael Date: Fri, 6 Sep 2024 09:26:42 +0800 Subject: [PATCH 20/22] keep list of cleared messages --- packages/jupyter-ai/jupyter_ai/history.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/packages/jupyter-ai/jupyter_ai/history.py b/packages/jupyter-ai/jupyter_ai/history.py index a70c20cc4..610617255 100644 --- a/packages/jupyter-ai/jupyter_ai/history.py +++ b/packages/jupyter-ai/jupyter_ai/history.py @@ -1,5 +1,5 @@ import time -from typing import List, Optional, Sequence +from typing import List, Optional, Sequence, Set from langchain_core.chat_history import BaseChatMessageHistory from langchain_core.messages import BaseMessage @@ -21,7 +21,7 @@ class BoundedChatHistory(BaseChatMessageHistory, BaseModel): k: int clear_time: float = 0.0 - clear_msg_id: Optional[str] = None + cleared_msgs: Set[str] = set() _all_messages: List[BaseMessage] = PrivateAttr(default_factory=list) @property @@ -53,10 +53,11 @@ def clear(self, human_msg_id: Optional[str] = None) -> None: for m in self._all_messages if m.additional_kwargs[HUMAN_MSG_ID_KEY] != human_msg_id ] + self.cleared_msgs.add(human_msg_id) else: self._all_messages = [] - self.clear_time = time.time() - self.clear_msg_id = human_msg_id + self.cleared_msgs = set() + self.clear_time = time.time() async def aclear(self) -> None: self.clear() @@ -92,10 +93,10 @@ def messages(self) -> List[BaseMessage]: def add_message(self, message: BaseMessage) -> None: # prevent adding pending messages to the store if clear was triggered. - # if targeted clearing, allow adding pending messages that were not cleared. - if self.last_human_msg.time > self.history.clear_time or ( - self.history.clear_msg_id - and self.last_human_msg.id != self.history.clear_msg_id + # if targeted clearing, prevent adding target message if still pending. + if ( + self.last_human_msg.time > self.history.clear_time + and self.last_human_msg.id not in self.history.cleared_msgs ): message.additional_kwargs[HUMAN_MSG_ID_KEY] = self.last_human_msg.id self.history.add_message(message) From cdf864b31c3ec4d25e42d14a7e24ff2c07c86065 Mon Sep 17 00:00:00 2001 From: michael Date: Fri, 6 Sep 2024 09:44:00 +0800 Subject: [PATCH 21/22] update doc --- packages/jupyter-ai/jupyter_ai/history.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/jupyter-ai/jupyter_ai/history.py b/packages/jupyter-ai/jupyter_ai/history.py index 610617255..f96c80fe2 100644 --- a/packages/jupyter-ai/jupyter_ai/history.py +++ b/packages/jupyter-ai/jupyter_ai/history.py @@ -46,7 +46,9 @@ async def aadd_messages(self, messages: Sequence[BaseMessage]) -> None: self.add_messages(messages) def clear(self, human_msg_id: Optional[str] = None) -> None: - """Clear all messages after the given time""" + """Clears conversation exchanges. If `human_msg_id` is provided, only + clears the respective human message and its reply. 
Otherwise, clears + all messages.""" if human_msg_id: self._all_messages = [ m From f4d996e66e049080434778d4ad79efa7bcf93e31 Mon Sep 17 00:00:00 2001 From: michael Date: Sun, 8 Sep 2024 11:46:30 +0800 Subject: [PATCH 22/22] support clearing all subsequent exchanges --- packages/jupyter-ai/jupyter_ai/handlers.py | 31 ++++++++++++++----- packages/jupyter-ai/jupyter_ai/history.py | 9 +++--- packages/jupyter-ai/jupyter_ai/models.py | 9 ++++-- packages/jupyter-ai/src/chat_handler.ts | 9 +++--- .../chat-messages/chat-message-delete.tsx | 3 +- packages/jupyter-ai/src/handler.ts | 3 +- 6 files changed, 43 insertions(+), 21 deletions(-) diff --git a/packages/jupyter-ai/jupyter_ai/handlers.py b/packages/jupyter-ai/jupyter_ai/handlers.py index cc34369c5..a614e3e84 100644 --- a/packages/jupyter-ai/jupyter_ai/handlers.py +++ b/packages/jupyter-ai/jupyter_ai/handlers.py @@ -255,8 +255,8 @@ def broadcast_message(self, message: Message): filter(lambda m: m.id != message.id, self.pending_messages) ) elif isinstance(message, ClearMessage): - if message.target: - self._clear_chat_history_at(message.target) + if message.targets: + self._clear_chat_history_at(message.targets) else: self.chat_history.clear() self.pending_messages.clear() @@ -277,7 +277,22 @@ async def on_message(self, message): return if isinstance(request, ClearRequest): - self.broadcast_message(ClearMessage(target=request.target)) + if not request.target: + targets = None + elif request.after: + target_msg = None + for msg in self.chat_history: + if msg.id == request.target: + target_msg = msg + if target_msg: + targets = [ + msg.id + for msg in self.chat_history + if msg.time >= target_msg.time and msg.type == "human" + ] + else: + targets = [request.target] + self.broadcast_message(ClearMessage(targets=targets)) return chat_request = request @@ -326,19 +341,19 @@ async def _route(self, message): command_readable = "Default" if command == "default" else command self.log.info(f"{command_readable} chat handler resolved in {latency_ms} ms.") - def _clear_chat_history_at(self, msg_id: str): + def _clear_chat_history_at(self, msg_ids: List[str]): """ - Clears a conversation exchange given a human message ID `msg_id`. + Clears conversation exchanges associated with list of human message IDs. """ self.chat_history[:] = [ msg for msg in self.chat_history - if msg.id != msg_id and getattr(msg, "reply_to", None) != msg_id + if msg.id not in msg_ids and getattr(msg, "reply_to", None) not in msg_ids ] self.pending_messages[:] = [ - msg for msg in self.pending_messages if msg.reply_to != msg_id + msg for msg in self.pending_messages if msg.reply_to not in msg_ids ] - self.llm_chat_memory.clear(msg_id) + self.llm_chat_memory.clear(msg_ids) def on_close(self): self.log.debug("Disconnecting client with user %s", self.client_id) diff --git a/packages/jupyter-ai/jupyter_ai/history.py b/packages/jupyter-ai/jupyter_ai/history.py index f96c80fe2..9e1064194 100644 --- a/packages/jupyter-ai/jupyter_ai/history.py +++ b/packages/jupyter-ai/jupyter_ai/history.py @@ -45,17 +45,17 @@ async def aadd_messages(self, messages: Sequence[BaseMessage]) -> None: """Add messages to the store""" self.add_messages(messages) - def clear(self, human_msg_id: Optional[str] = None) -> None: + def clear(self, human_msg_ids: Optional[List[str]] = None) -> None: """Clears conversation exchanges. If `human_msg_id` is provided, only clears the respective human message and its reply. 
Otherwise, clears all messages.""" - if human_msg_id: + if human_msg_ids: self._all_messages = [ m for m in self._all_messages - if m.additional_kwargs[HUMAN_MSG_ID_KEY] != human_msg_id + if m.additional_kwargs[HUMAN_MSG_ID_KEY] not in human_msg_ids ] - self.cleared_msgs.add(human_msg_id) + self.cleared_msgs.update(human_msg_ids) else: self._all_messages = [] self.cleared_msgs = set() @@ -95,7 +95,6 @@ def messages(self) -> List[BaseMessage]: def add_message(self, message: BaseMessage) -> None: # prevent adding pending messages to the store if clear was triggered. - # if targeted clearing, prevent adding target message if still pending. if ( self.last_human_msg.time > self.history.clear_time and self.last_human_msg.id not in self.history.cleared_msgs diff --git a/packages/jupyter-ai/jupyter_ai/models.py b/packages/jupyter-ai/jupyter_ai/models.py index 7e663d55d..f9098a12a 100644 --- a/packages/jupyter-ai/jupyter_ai/models.py +++ b/packages/jupyter-ai/jupyter_ai/models.py @@ -47,6 +47,11 @@ class ClearRequest(BaseModel): If not provided, this requests the backend to clear all messages. """ + after: Optional[bool] + """ + Whether to clear target and all subsequent exchanges. + """ + class ChatUser(BaseModel): # User ID assigned by IdentityProvider. @@ -114,9 +119,9 @@ class HumanChatMessage(BaseModel): class ClearMessage(BaseModel): type: Literal["clear"] = "clear" - target: Optional[str] = None + targets: Optional[List[str]] = None """ - Message ID of the HumanChatMessage to delete an exchange at. + Message IDs of the HumanChatMessage to delete an exchange at. If not provided, this instructs the frontend to clear all messages. """ diff --git a/packages/jupyter-ai/src/chat_handler.ts b/packages/jupyter-ai/src/chat_handler.ts index 2ff8b108c..76c93a851 100644 --- a/packages/jupyter-ai/src/chat_handler.ts +++ b/packages/jupyter-ai/src/chat_handler.ts @@ -132,14 +132,15 @@ export class ChatHandler implements IDisposable { case 'connection': break; case 'clear': - if (newMessage.target) { + if (newMessage.targets) { + const targets = newMessage.targets; this._messages = this._messages.filter( msg => - msg.id !== newMessage.target && - !('reply_to' in msg && msg.reply_to === newMessage.target) + !targets.includes(msg.id) && + !('reply_to' in msg && targets.includes(msg.reply_to)) ); this._pendingMessages = this._pendingMessages.filter( - msg => msg.reply_to !== newMessage.target + msg => !targets.includes(msg.reply_to) ); } else { this._messages = []; diff --git a/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx b/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx index d6fc691bd..b91e15b93 100644 --- a/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx +++ b/packages/jupyter-ai/src/components/chat-messages/chat-message-delete.tsx @@ -15,7 +15,8 @@ type DeleteButtonProps = { export function ChatMessageDelete(props: DeleteButtonProps): JSX.Element { const request: AiService.ClearRequest = { type: 'clear', - target: props.message.id + target: props.message.id, + after: false }; return (
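The patches above only show diffs, so as a rough end-to-end illustration, here is a minimal, self-contained Python sketch of the exchange-clearing behaviour they converge on in patches 16-22. The `Msg` and `ChatState` names below are hypothetical stand-ins, not classes from jupyter_ai; the filtering mirrors `_clear_chat_history_at` in handlers.py and the targeted branch of `BoundedChatHistory.clear` in history.py.

# Hypothetical stand-ins (not jupyter_ai classes) showing how a targeted clear
# removes the selected human messages and anything that replies to them.
from dataclasses import dataclass, field
from typing import List, Optional, Set


@dataclass
class Msg:
    id: str
    type: str                       # "human" or "agent"
    reply_to: Optional[str] = None  # set on agent/pending replies


@dataclass
class ChatState:
    chat_history: List[Msg] = field(default_factory=list)
    cleared_msgs: Set[str] = field(default_factory=set)

    def clear_exchanges(self, targets: Optional[List[str]] = None) -> None:
        if targets is None:
            # no targets: clear everything, like a ClearMessage without `targets`
            self.chat_history = []
            self.cleared_msgs = set()
            return
        # targeted clear: drop the human messages and any message replying to them
        self.chat_history = [
            m for m in self.chat_history
            if m.id not in targets and m.reply_to not in targets
        ]
        self.cleared_msgs.update(targets)


state = ChatState(chat_history=[
    Msg("h1", "human"), Msg("a1", "agent", reply_to="h1"),
    Msg("h2", "human"), Msg("a2", "agent", reply_to="h2"),
])
state.clear_exchanges(["h1"])                     # delete a single exchange
assert [m.id for m in state.chat_history] == ["h2", "a2"]

In the extension itself the same filtering is applied in three places (the handler's chat_history and pending_messages, and the bounded LLM memory), keyed by the human message ID that each reply carries in `reply_to` or `HUMAN_MSG_ID_KEY`.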