From 386cdef2d75efe446091d049d9c469fd0974a571 Mon Sep 17 00:00:00 2001 From: David Brochart Date: Mon, 4 Nov 2024 16:28:47 +0100 Subject: [PATCH 1/5] Add forking API (#394) * Add forking API * Add GET forks of root * Add fork Jupyter events * Replace query parameter merge=1 with merge=true * Add fork title and description --- .../jupyter_server_ydoc/app.py | 10 ++ .../jupyter_server_ydoc/events/fork.yaml | 56 +++++++ .../jupyter_server_ydoc/handlers.py | 100 ++++++++++++ .../jupyter_server_ydoc/pytest_plugin.py | 59 +++++++ .../jupyter_server_ydoc/utils.py | 2 + projects/jupyter-server-ydoc/pyproject.toml | 1 + tests/test_handlers.py | 146 ++++++++++++++++++ 7 files changed, 374 insertions(+) create mode 100644 projects/jupyter-server-ydoc/jupyter_server_ydoc/events/fork.yaml diff --git a/projects/jupyter-server-ydoc/jupyter_server_ydoc/app.py b/projects/jupyter-server-ydoc/jupyter_server_ydoc/app.py index e286cc13..b49ee7af 100644 --- a/projects/jupyter-server-ydoc/jupyter_server_ydoc/app.py +++ b/projects/jupyter-server-ydoc/jupyter_server_ydoc/app.py @@ -14,6 +14,7 @@ from traitlets import Bool, Float, Type from .handlers import ( + DocForkHandler, DocSessionHandler, TimelineHandler, UndoRedoHandler, @@ -25,6 +26,7 @@ from .utils import ( AWARENESS_EVENTS_SCHEMA_PATH, EVENTS_SCHEMA_PATH, + FORK_EVENTS_SCHEMA_PATH, encode_file_path, room_id_from_encoded_path, ) @@ -85,6 +87,7 @@ def initialize(self): super().initialize() self.serverapp.event_logger.register_event_schema(EVENTS_SCHEMA_PATH) self.serverapp.event_logger.register_event_schema(AWARENESS_EVENTS_SCHEMA_PATH) + self.serverapp.event_logger.register_event_schema(FORK_EVENTS_SCHEMA_PATH) def initialize_settings(self): self.settings.update( @@ -123,6 +126,13 @@ def initialize_handlers(self): self.handlers.extend( [ + ( + r"/api/collaboration/fork/(.*)", + DocForkHandler, + { + "ywebsocket_server": self.ywebsocket_server, + }, + ), ( r"/api/collaboration/room/(.*)", YDocWebSocketHandler, diff --git a/projects/jupyter-server-ydoc/jupyter_server_ydoc/events/fork.yaml b/projects/jupyter-server-ydoc/jupyter_server_ydoc/events/fork.yaml new file mode 100644 index 00000000..0125fdf2 --- /dev/null +++ b/projects/jupyter-server-ydoc/jupyter_server_ydoc/events/fork.yaml @@ -0,0 +1,56 @@ +"$id": https://schema.jupyter.org/jupyter_collaboration/fork/v1 +"$schema": "http://json-schema.org/draft-07/schema" +version: 1 +title: Collaborative fork events +personal-data: true +description: | + Fork events emitted from server-side during a collaborative session. +type: object +required: + - fork_roomid + - fork_info + - username + - action +properties: + fork_roomid: + type: string + description: | + Fork root room ID. + fork_info: + type: object + description: | + Fork root room information. + required: + - root_roomid + - synchronize + - title + - description + properties: + root_roomid: + type: string + description: | + Root room ID. Usually composed by the file type, format and ID. + synchronize: + type: boolean + description: | + Whether the fork is kept in sync with the root. + title: + type: string + description: | + The title of the fork. + description: + type: string + description: | + The description of the fork. + username: + type: string + description: | + The name of the user who created or deleted the fork. + action: + enum: + - create + - delete + description: | + Possible values: + 1. create + 2. 
delete diff --git a/projects/jupyter-server-ydoc/jupyter_server_ydoc/handlers.py b/projects/jupyter-server-ydoc/jupyter_server_ydoc/handlers.py index 586e4dc1..b76666d4 100644 --- a/projects/jupyter-server-ydoc/jupyter_server_ydoc/handlers.py +++ b/projects/jupyter-server-ydoc/jupyter_server_ydoc/handlers.py @@ -26,6 +26,7 @@ from .utils import ( JUPYTER_COLLABORATION_AWARENESS_EVENTS_URI, JUPYTER_COLLABORATION_EVENTS_URI, + JUPYTER_COLLABORATION_FORK_EVENTS_URI, LogLevel, MessageType, decode_file_path, @@ -39,6 +40,7 @@ SERVER_SESSION = str(uuid.uuid4()) FORK_DOCUMENTS = {} +FORK_ROOMS: dict[str, dict[str, str]] = {} class YDocWebSocketHandler(WebSocketHandler, JupyterHandler): @@ -600,3 +602,101 @@ async def _cleanup_undo_manager(self, room_id: str) -> None: if room_id in FORK_DOCUMENTS: del FORK_DOCUMENTS[room_id] self.log.info(f"Fork Document for {room_id} has been removed.") + + +class DocForkHandler(APIHandler): + """ + Jupyter Server handler to: + - create a fork of a root document (optionally synchronizing with the root document), + - delete a fork of a root document (optionally merging back in the root document). + - get fork IDs of a root document. + """ + + auth_resource = "contents" + + def initialize( + self, + ywebsocket_server: JupyterWebsocketServer, + ) -> None: + self._websocket_server = ywebsocket_server + + @web.authenticated + @authorized + async def get(self, root_roomid): + """ + Returns a dictionary of fork room ID to fork room information for the given root room ID. + """ + self.write( + { + fork_roomid: fork_info + for fork_roomid, fork_info in FORK_ROOMS.items() + if fork_info["root_roomid"] == root_roomid + } + ) + + @web.authenticated + @authorized + async def put(self, root_roomid): + """ + Creates a fork of a root document and returns its ID. + Optionally keeps the fork in sync with the root. + """ + fork_roomid = uuid4().hex + root_room = await self._websocket_server.get_room(root_roomid) + update = root_room.ydoc.get_update() + fork_ydoc = Doc() + fork_ydoc.apply_update(update) + model = self.get_json_body() + synchronize = model.get("synchronize", False) + if synchronize: + root_room.ydoc.observe(lambda event: fork_ydoc.apply_update(event.update)) + FORK_ROOMS[fork_roomid] = fork_info = { + "root_roomid": root_roomid, + "synchronize": synchronize, + "title": model.get("title", ""), + "description": model.get("description", ""), + } + fork_room = YRoom(ydoc=fork_ydoc) + self._websocket_server.rooms[fork_roomid] = fork_room + await self._websocket_server.start_room(fork_room) + self._emit_fork_event(self.current_user.username, fork_roomid, fork_info, "create") + data = json.dumps( + { + "sessionId": SERVER_SESSION, + "fork_roomid": fork_roomid, + "fork_info": fork_info, + } + ) + self.set_status(201) + return self.finish(data) + + @web.authenticated + @authorized + async def delete(self, fork_roomid): + """ + Deletes a forked document, and optionally merges it back in the root document. 
+ """ + fork_info = FORK_ROOMS[fork_roomid] + root_roomid = fork_info["root_roomid"] + del FORK_ROOMS[fork_roomid] + if self.get_query_argument("merge") == "true": + root_room = await self._websocket_server.get_room(root_roomid) + root_ydoc = root_room.ydoc + fork_room = await self._websocket_server.get_room(fork_roomid) + fork_ydoc = fork_room.ydoc + fork_update = fork_ydoc.get_update() + root_ydoc.apply_update(fork_update) + await self._websocket_server.delete_room(name=fork_roomid) + self._emit_fork_event(self.current_user.username, fork_roomid, fork_info, "delete") + self.set_status(200) + + def _emit_fork_event( + self, username: str, fork_roomid: str, fork_info: dict[str, str], action: str + ) -> None: + data = { + "username": username, + "fork_roomid": fork_roomid, + "fork_info": fork_info, + "action": action, + } + self.event_logger.emit(schema_id=JUPYTER_COLLABORATION_FORK_EVENTS_URI, data=data) diff --git a/projects/jupyter-server-ydoc/jupyter_server_ydoc/pytest_plugin.py b/projects/jupyter-server-ydoc/jupyter_server_ydoc/pytest_plugin.py index 8bb8479e..7ac9b18f 100644 --- a/projects/jupyter-server-ydoc/jupyter_server_ydoc/pytest_plugin.py +++ b/projects/jupyter-server-ydoc/jupyter_server_ydoc/pytest_plugin.py @@ -154,6 +154,65 @@ async def _inner(format: str, type: str, path: str) -> Any: return _inner +@pytest.fixture +def rtc_connect_fork_client(jp_http_port, jp_base_url, rtc_fetch_session): + async def _inner(room_id: str) -> Any: + return connect( + f"ws://127.0.0.1:{jp_http_port}{jp_base_url}api/collaboration/room/{room_id}" + ) + + return _inner + + +@pytest.fixture +def rtc_get_forks_client(jp_fetch): + async def _inner(root_roomid: str) -> Any: + return await jp_fetch( + "/api/collaboration/fork", + root_roomid, + method="GET", + ) + + return _inner + + +@pytest.fixture +def rtc_create_fork_client(jp_fetch): + async def _inner( + root_roomid: str, + synchronize: bool, + title: str | None = None, + description: str | None = None, + ) -> Any: + return await jp_fetch( + "/api/collaboration/fork", + root_roomid, + method="PUT", + body=json.dumps( + { + "synchronize": synchronize, + "title": title, + "description": description, + } + ), + ) + + return _inner + + +@pytest.fixture +def rtc_delete_fork_client(jp_fetch): + async def _inner(fork_roomid: str, merge: bool) -> Any: + return await jp_fetch( + "/api/collaboration/fork", + fork_roomid, + method="DELETE", + params={"merge": str(merge).lower()}, + ) + + return _inner + + @pytest.fixture def rtc_add_doc_to_store(rtc_connect_doc_client): event = Event() diff --git a/projects/jupyter-server-ydoc/jupyter_server_ydoc/utils.py b/projects/jupyter-server-ydoc/jupyter_server_ydoc/utils.py index d1c74ce4..22c51d87 100644 --- a/projects/jupyter-server-ydoc/jupyter_server_ydoc/utils.py +++ b/projects/jupyter-server-ydoc/jupyter_server_ydoc/utils.py @@ -11,7 +11,9 @@ JUPYTER_COLLABORATION_AWARENESS_EVENTS_URI = ( "https://schema.jupyter.org/jupyter_collaboration/awareness/v1" ) +JUPYTER_COLLABORATION_FORK_EVENTS_URI = "https://schema.jupyter.org/jupyter_collaboration/fork/v1" AWARENESS_EVENTS_SCHEMA_PATH = EVENTS_FOLDER_PATH / "awareness.yaml" +FORK_EVENTS_SCHEMA_PATH = EVENTS_FOLDER_PATH / "fork.yaml" class MessageType(IntEnum): diff --git a/projects/jupyter-server-ydoc/pyproject.toml b/projects/jupyter-server-ydoc/pyproject.toml index dce09301..52787208 100644 --- a/projects/jupyter-server-ydoc/pyproject.toml +++ b/projects/jupyter-server-ydoc/pyproject.toml @@ -41,6 +41,7 @@ dynamic = ["version"] [project.optional-dependencies] 
test = [ "coverage", + "dirty-equals", "jupyter_server[test]>=2.4.0", "jupyter_server_fileid[test]", "pytest>=7.0", diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 7cb55e0b..199dd64c 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -7,9 +7,11 @@ from asyncio import Event, sleep from typing import Any +from dirty_equals import IsStr from jupyter_events.logger import EventLogger from jupyter_server_ydoc.test_utils import Websocket from jupyter_ydoc import YUnicode +from pycrdt import Text from pycrdt_websocket import WebsocketProvider @@ -211,3 +213,147 @@ async def my_listener(logger: EventLogger, schema_id: str, data: dict) -> None: await jp_serverapp.web_app.settings["jupyter_server_ydoc"].stop_extension() del jp_serverapp.web_app.settings["file_id_manager"] + + +async def test_fork_handler( + jp_serverapp, + rtc_create_file, + rtc_connect_doc_client, + rtc_connect_fork_client, + rtc_get_forks_client, + rtc_create_fork_client, + rtc_delete_fork_client, + rtc_fetch_session, +): + collected_data = [] + + async def my_listener(logger: EventLogger, schema_id: str, data: dict) -> None: + collected_data.append(data) + + event_logger = jp_serverapp.event_logger + event_logger.add_listener( + schema_id="https://schema.jupyter.org/jupyter_collaboration/fork/v1", + listener=my_listener, + ) + + path, _ = await rtc_create_file("test.txt", "Hello") + + root_connect_event = Event() + + def _on_root_change(topic: str, event: Any) -> None: + if topic == "source": + root_connect_event.set() + + root_ydoc = YUnicode() + root_ydoc.observe(_on_root_change) + + resp = await rtc_fetch_session("text", "file", path) + data = json.loads(resp.body.decode("utf-8")) + file_id = data["fileId"] + root_roomid = f"text:file:{file_id}" + + async with await rtc_connect_doc_client("text", "file", path) as ws, WebsocketProvider( + root_ydoc.ydoc, ws + ): + await root_connect_event.wait() + + resp = await rtc_create_fork_client(root_roomid, False, "my fork0", "is awesome0") + data = json.loads(resp.body.decode("utf-8")) + fork_roomid0 = data["fork_roomid"] + + resp = await rtc_get_forks_client(root_roomid) + data = json.loads(resp.body.decode("utf-8")) + expected_data0 = { + fork_roomid0: { + "root_roomid": root_roomid, + "synchronize": False, + "title": "my fork0", + "description": "is awesome0", + } + } + assert data == expected_data0 + + assert collected_data == [ + { + "username": IsStr(), + "fork_roomid": fork_roomid0, + "fork_info": expected_data0[fork_roomid0], + "action": "create", + } + ] + + resp = await rtc_create_fork_client(root_roomid, True, "my fork1", "is awesome1") + data = json.loads(resp.body.decode("utf-8")) + fork_roomid1 = data["fork_roomid"] + + resp = await rtc_get_forks_client(root_roomid) + data = json.loads(resp.body.decode("utf-8")) + expected_data1 = { + fork_roomid1: { + "root_roomid": root_roomid, + "synchronize": True, + "title": "my fork1", + "description": "is awesome1", + } + } + expected_data = dict(**expected_data0, **expected_data1) + assert data == expected_data + + assert len(collected_data) == 2 + assert collected_data[1] == { + "username": IsStr(), + "fork_roomid": fork_roomid1, + "fork_info": expected_data[fork_roomid1], + "action": "create", + } + + fork_ydoc = YUnicode() + fork_connect_event = Event() + + def _on_fork_change(topic: str, event: Any) -> None: + if topic == "source": + fork_connect_event.set() + + fork_ydoc.observe(_on_fork_change) + fork_text = fork_ydoc.ydoc.get("source", type=Text) + + async with await 
rtc_connect_fork_client(fork_roomid1) as ws, WebsocketProvider( + fork_ydoc.ydoc, ws + ): + await fork_connect_event.wait() + root_text = root_ydoc.ydoc.get("source", type=Text) + root_text += ", World!" + await sleep(0.1) + assert str(fork_text) == "Hello, World!" + fork_text += " Hi!" + + await sleep(0.1) + assert str(root_text) == "Hello, World!" + + await rtc_delete_fork_client(fork_roomid0, True) + await sleep(0.1) + assert str(root_text) == "Hello, World!" + resp = await rtc_get_forks_client(root_roomid) + data = json.loads(resp.body.decode("utf-8")) + assert data == expected_data1 + assert len(collected_data) == 3 + assert collected_data[2] == { + "username": IsStr(), + "fork_roomid": fork_roomid0, + "fork_info": expected_data[fork_roomid0], + "action": "delete", + } + + await rtc_delete_fork_client(fork_roomid1, True) + await sleep(0.1) + assert str(root_text) == "Hello, World! Hi!" + resp = await rtc_get_forks_client(root_roomid) + data = json.loads(resp.body.decode("utf-8")) + assert data == {} + assert len(collected_data) == 4 + assert collected_data[3] == { + "username": IsStr(), + "fork_roomid": fork_roomid1, + "fork_info": expected_data[fork_roomid1], + "action": "delete", + } From 7a5613f7597c2e2d1adaa2af4c303f9578856ef6 Mon Sep 17 00:00:00 2001 From: Duc Trung Le Date: Fri, 8 Nov 2024 21:49:47 +0100 Subject: [PATCH 2/5] Add js api (#395) * Add forking API (#394) * Add forking API * Add GET forks of root * Add fork Jupyter events * Replace query parameter merge=1 with merge=true * Add fork title and description * Add JS APIs --------- Co-authored-by: David Brochart --- .../docprovider-extension/src/forkManager.ts | 28 ++++ packages/docprovider-extension/src/index.ts | 4 +- .../src/__tests__/forkManager.spec.ts | 96 +++++++++++++ packages/docprovider/src/component.tsx | 1 - packages/docprovider/src/forkManager.ts | 126 ++++++++++++++++++ packages/docprovider/src/index.ts | 2 + packages/docprovider/src/requests.ts | 41 ++++++ packages/docprovider/src/tokens.ts | 117 ++++++++++++++++ packages/docprovider/src/ydrive.ts | 1 - .../jupyter_server_ydoc/pytest_plugin.py | 2 +- 10 files changed, 414 insertions(+), 4 deletions(-) create mode 100644 packages/docprovider-extension/src/forkManager.ts create mode 100644 packages/docprovider/src/__tests__/forkManager.spec.ts create mode 100644 packages/docprovider/src/forkManager.ts create mode 100644 packages/docprovider/src/tokens.ts diff --git a/packages/docprovider-extension/src/forkManager.ts b/packages/docprovider-extension/src/forkManager.ts new file mode 100644 index 00000000..5c2cf916 --- /dev/null +++ b/packages/docprovider-extension/src/forkManager.ts @@ -0,0 +1,28 @@ +/* + * Copyright (c) Jupyter Development Team. + * Distributed under the terms of the Modified BSD License. 
+ */ + +import { ICollaborativeDrive } from '@jupyter/collaborative-drive'; +import { + ForkManager, + IForkManager, + IForkManagerToken +} from '@jupyter/docprovider'; + +import { + JupyterFrontEnd, + JupyterFrontEndPlugin +} from '@jupyterlab/application'; + +export const forkManagerPlugin: JupyterFrontEndPlugin = { + id: '@jupyter/docprovider-extension:forkManager', + autoStart: true, + requires: [ICollaborativeDrive], + provides: IForkManagerToken, + activate: (app: JupyterFrontEnd, drive: ICollaborativeDrive) => { + const eventManager = app.serviceManager.events; + const manager = new ForkManager({ drive, eventManager }); + return manager; + } +}; diff --git a/packages/docprovider-extension/src/index.ts b/packages/docprovider-extension/src/index.ts index 1869c5b7..556f7470 100644 --- a/packages/docprovider-extension/src/index.ts +++ b/packages/docprovider-extension/src/index.ts @@ -16,6 +16,7 @@ import { statusBarTimeline } from './filebrowser'; import { notebookCellExecutor } from './executor'; +import { forkManagerPlugin } from './forkManager'; /** * Export the plugins as default. @@ -27,7 +28,8 @@ const plugins: JupyterFrontEndPlugin[] = [ defaultFileBrowser, logger, notebookCellExecutor, - statusBarTimeline + statusBarTimeline, + forkManagerPlugin ]; export default plugins; diff --git a/packages/docprovider/src/__tests__/forkManager.spec.ts b/packages/docprovider/src/__tests__/forkManager.spec.ts new file mode 100644 index 00000000..c664a36d --- /dev/null +++ b/packages/docprovider/src/__tests__/forkManager.spec.ts @@ -0,0 +1,96 @@ +// Copyright (c) Jupyter Development Team. +// Distributed under the terms of the Modified BSD License. + +import { ICollaborativeDrive } from '@jupyter/collaborative-drive'; +import { + ForkManager, + JUPYTER_COLLABORATION_FORK_EVENTS_URI +} from '../forkManager'; +import { Event } from '@jupyterlab/services'; +import { Signal } from '@lumino/signaling'; +import { requestAPI } from '../requests'; +jest.mock('../requests'); + +const driveMock = { + name: 'rtc', + providers: new Map() +} as ICollaborativeDrive; +const stream = new Signal({}); +const eventManagerMock = { + stream: stream as any +} as Event.IManager; + +describe('@jupyter/docprovider', () => { + let manager: ForkManager; + beforeEach(() => { + manager = new ForkManager({ + drive: driveMock, + eventManager: eventManagerMock + }); + }); + describe('forkManager', () => { + it('should have a type', () => { + expect(ForkManager).not.toBeUndefined(); + }); + it('should be able to create instance', () => { + expect(manager).toBeInstanceOf(ForkManager); + }); + it('should be able to create new fork', async () => { + await manager.createFork({ + rootId: 'root-uuid', + synchronize: true, + title: 'my fork label', + description: 'my fork description' + }); + expect(requestAPI).toHaveBeenCalledWith( + 'api/collaboration/fork/root-uuid', + { + method: 'PUT', + body: JSON.stringify({ + title: 'my fork label', + description: 'my fork description', + synchronize: true + }) + } + ); + }); + it('should be able to get all forks', async () => { + await manager.getAllForks('root-uuid'); + expect(requestAPI).toHaveBeenCalledWith( + 'api/collaboration/fork/root-uuid', + { + method: 'GET' + } + ); + }); + it('should be able to get delete forks', async () => { + await manager.deleteFork({ forkId: 'fork-uuid', merge: true }); + expect(requestAPI).toHaveBeenCalledWith( + 'api/collaboration/fork/fork-uuid?merge=true', + { + method: 'DELETE' + } + ); + }); + it('should be able to emit fork added signal', async () => 
{ + const listener = jest.fn(); + manager.forkAdded.connect(listener); + const data = { + schema_id: JUPYTER_COLLABORATION_FORK_EVENTS_URI, + action: 'create' + }; + stream.emit(data); + expect(listener).toHaveBeenCalledWith(manager, data); + }); + it('should be able to emit fork deleted signal', async () => { + const listener = jest.fn(); + manager.forkDeleted.connect(listener); + const data = { + schema_id: JUPYTER_COLLABORATION_FORK_EVENTS_URI, + action: 'delete' + }; + stream.emit(data); + expect(listener).toHaveBeenCalledWith(manager, data); + }); + }); +}); diff --git a/packages/docprovider/src/component.tsx b/packages/docprovider/src/component.tsx index 13351aa3..5a6d4475 100644 --- a/packages/docprovider/src/component.tsx +++ b/packages/docprovider/src/component.tsx @@ -76,7 +76,6 @@ export const TimelineSliderComponent: React.FC = ({ setData(data); setCurrentTimestampIndex(data.timestamps.length - 1); provider.connectToForkDoc(data.forkRoom, data.sessionId); - sessionRef.current = await requestDocSession( format, contentType, diff --git a/packages/docprovider/src/forkManager.ts b/packages/docprovider/src/forkManager.ts new file mode 100644 index 00000000..f781e802 --- /dev/null +++ b/packages/docprovider/src/forkManager.ts @@ -0,0 +1,126 @@ +/* + * Copyright (c) Jupyter Development Team. + * Distributed under the terms of the Modified BSD License. + */ + +import { ICollaborativeDrive } from '@jupyter/collaborative-drive'; +import { URLExt } from '@jupyterlab/coreutils'; +import { Event } from '@jupyterlab/services'; +import { ISignal, Signal } from '@lumino/signaling'; + +import { requestAPI, ROOM_FORK_URL } from './requests'; +import { + IAllForksResponse, + IForkChangedEvent, + IForkCreationResponse, + IForkManager +} from './tokens'; +import { IForkProvider } from './ydrive'; + +export const JUPYTER_COLLABORATION_FORK_EVENTS_URI = + 'https://schema.jupyter.org/jupyter_collaboration/fork/v1'; + +export class ForkManager implements IForkManager { + constructor(options: ForkManager.IOptions) { + const { drive, eventManager } = options; + this._drive = drive; + this._eventManager = eventManager; + this._eventManager.stream.connect(this._handleEvent, this); + } + + get isDisposed(): boolean { + return this._disposed; + } + get forkAdded(): ISignal { + return this._forkAddedSignal; + } + get forkDeleted(): ISignal { + return this._forkDeletedSignal; + } + + dispose(): void { + if (this._disposed) { + return; + } + this._eventManager?.stream.disconnect(this._handleEvent); + this._disposed = true; + } + async createFork(options: { + rootId: string; + synchronize: boolean; + title?: string; + description?: string; + }): Promise { + const { rootId, title, description, synchronize } = options; + const init: RequestInit = { + method: 'PUT', + body: JSON.stringify({ title, description, synchronize }) + }; + const url = URLExt.join(ROOM_FORK_URL, rootId); + const response = await requestAPI(url, init); + return response; + } + + async getAllForks(rootId: string) { + const url = URLExt.join(ROOM_FORK_URL, rootId); + const init = { method: 'GET' }; + const response = await requestAPI(url, init); + return response; + } + + async deleteFork(options: { forkId: string; merge: boolean }): Promise { + const { forkId, merge } = options; + const url = URLExt.join(ROOM_FORK_URL, forkId); + const query = URLExt.objectToQueryString({ merge }); + const init = { method: 'DELETE' }; + await requestAPI(`${url}${query}`, init); + } + getProvider(options: { + documentPath: string; + format: string; + type: 
string; + }): IForkProvider | undefined { + const { documentPath, format, type } = options; + const drive = this._drive; + if (drive) { + const driveName = drive.name; + let docPath = documentPath; + if (documentPath.startsWith(driveName)) { + docPath = documentPath.slice(driveName.length + 1); + } + const provider = drive.providers.get(`${format}:${type}:${docPath}`); + return provider as IForkProvider | undefined; + } + return; + } + + private _handleEvent(_: Event.IManager, emission: Event.Emission) { + if (emission.schema_id === JUPYTER_COLLABORATION_FORK_EVENTS_URI) { + switch (emission.action) { + case 'create': { + this._forkAddedSignal.emit(emission as any); + break; + } + case 'delete': { + this._forkDeletedSignal.emit(emission as any); + break; + } + default: + break; + } + } + } + + private _disposed = false; + private _drive: ICollaborativeDrive | undefined; + private _eventManager: Event.IManager | undefined; + private _forkAddedSignal = new Signal(this); + private _forkDeletedSignal = new Signal(this); +} + +export namespace ForkManager { + export interface IOptions { + drive: ICollaborativeDrive; + eventManager: Event.IManager; + } +} diff --git a/packages/docprovider/src/index.ts b/packages/docprovider/src/index.ts index b3cd6572..178b7984 100644 --- a/packages/docprovider/src/index.ts +++ b/packages/docprovider/src/index.ts @@ -13,3 +13,5 @@ export * from './requests'; export * from './ydrive'; export * from './yprovider'; export * from './TimelineSlider'; +export * from './tokens'; +export * from './forkManager'; diff --git a/packages/docprovider/src/requests.ts b/packages/docprovider/src/requests.ts index 51a6ece3..0e374721 100644 --- a/packages/docprovider/src/requests.ts +++ b/packages/docprovider/src/requests.ts @@ -14,6 +14,8 @@ const DOC_SESSION_URL = 'api/collaboration/session'; const DOC_FORK_URL = 'api/collaboration/undo_redo'; const TIMELINE_URL = 'api/collaboration/timeline'; +export const ROOM_FORK_URL = 'api/collaboration/fork'; + /** * Document session model */ @@ -36,6 +38,45 @@ export interface ISessionModel { sessionId: string; } +/** + * Call the API extension + * + * @param endPoint API REST end point for the extension + * @param init Initial values for the request + * @returns The response body interpreted as JSON + */ +export async function requestAPI( + endPoint = '', + init: RequestInit = {} +): Promise { + // Make request to Jupyter API + const settings = ServerConnection.makeSettings(); + const requestUrl = URLExt.join(settings.baseUrl, endPoint); + + let response: Response; + try { + response = await ServerConnection.makeRequest(requestUrl, init, settings); + } catch (error) { + throw new ServerConnection.NetworkError(error as any); + } + + let data: any = await response.text(); + + if (data.length > 0) { + try { + data = JSON.parse(data); + } catch (error) { + console.error('Not a JSON response body.', response); + } + } + + if (!response.ok) { + throw new ServerConnection.ResponseError(response, data.message || data); + } + + return data; +} + export async function requestDocSession( format: string, type: string, diff --git a/packages/docprovider/src/tokens.ts b/packages/docprovider/src/tokens.ts new file mode 100644 index 00000000..c76b29cb --- /dev/null +++ b/packages/docprovider/src/tokens.ts @@ -0,0 +1,117 @@ +/* + * Copyright (c) Jupyter Development Team. + * Distributed under the terms of the Modified BSD License. 
+ */ + +import { Token } from '@lumino/coreutils'; +import { IDisposable } from '@lumino/disposable'; +import { ISignal } from '@lumino/signaling'; +import { IForkProvider } from './ydrive'; +export interface IForkInfo { + description?: string; + root_roomid: string; + synchronize: boolean; + title?: string; +} + +export interface IForkCreationResponse { + fork_info: IForkInfo; + fork_roomid: string; + sessionId: string; +} + +export interface IAllForksResponse { + [forkId: string]: IForkInfo; +} + +export interface IForkChangedEvent { + fork_info: IForkInfo; + fork_roomid: string; + username?: string; +} + +/** + * Interface representing a Fork Manager that manages forked documents and + * provides signals for fork-related events. + * + * @interface IForkManager + * @extends IDisposable + */ +export interface IForkManager extends IDisposable { + /** + * Get the fork provider of a given document. + * + * @param options.documentPath - The document path including the + * drive prefix. + * @param options.format - Format of the document. + * @param options.type - Content type of the document. + * @returns The fork provider of the document. + */ + getProvider(options: { + documentPath: string; + format: string; + type: string; + }): IForkProvider | undefined; + + /** + * Creates a new fork for a given document. + * + * @param options.rootId - The ID of the root document to fork. + * @param options.synchronize - A flag indicating whether the fork should be kept + * synchronized with the root document. + * @param options.title - An optional label for the fork. + * @param options.description - An optional description for the fork. + * + * @returns A promise that resolves to an `IForkCreationResponse` if the fork + * is created successfully, or `undefined` if the creation fails. + */ + createFork(options: { + rootId: string; + synchronize: boolean; + title?: string; + description?: string; + }): Promise; + + /** + * Retrieves all forks associated with a specific document. + * + * @param documentId - The ID of the document for which forks are to be retrieved. + * + * @returns A promise that resolves to an `IAllForksResponse` containing information about all forks. + */ + getAllForks(documentId: string): Promise; + + /** + * Deletes a specified fork and optionally merges its changes. + * + * @param options - Options for deleting the fork. + * @param options.forkId - The ID of the fork to be deleted. + * @param options.merge - A flag indicating whether changes from the fork should be merged back into the root document. + * + * @returns A promise that resolves when the fork is successfully deleted. + */ + deleteFork(options: { forkId: string; merge: boolean }): Promise; + + /** + * Signal emitted when a new fork is added. + * + * @event forkAdded + * @type ISignal + */ + forkAdded: ISignal; + + /** + * Signal emitted when a fork is deleted. + * + * @event forkDeleted + * @type ISignal + */ + forkDeleted: ISignal; +} + +/** + * Token providing a fork manager instance. + */ +export const IForkManagerToken = new Token( + '@jupyter/docprovider:IForkManagerToken' +); diff --git a/packages/docprovider/src/ydrive.ts b/packages/docprovider/src/ydrive.ts index 96a9106c..68a95723 100644 --- a/packages/docprovider/src/ydrive.ts +++ b/packages/docprovider/src/ydrive.ts @@ -308,7 +308,6 @@ class SharedModelFactory implements ISharedModelFactory { // the `sharedModel` will be the default one. 
return; } - if (this.documentFactories.has(options.contentType)) { const factory = this.documentFactories.get(options.contentType)!; const sharedModel = factory(options); diff --git a/projects/jupyter-server-ydoc/jupyter_server_ydoc/pytest_plugin.py b/projects/jupyter-server-ydoc/jupyter_server_ydoc/pytest_plugin.py index 7ac9b18f..a916c975 100644 --- a/projects/jupyter-server-ydoc/jupyter_server_ydoc/pytest_plugin.py +++ b/projects/jupyter-server-ydoc/jupyter_server_ydoc/pytest_plugin.py @@ -157,7 +157,7 @@ async def _inner(format: str, type: str, path: str) -> Any: @pytest.fixture def rtc_connect_fork_client(jp_http_port, jp_base_url, rtc_fetch_session): async def _inner(room_id: str) -> Any: - return connect( + return aconnect_ws( f"ws://127.0.0.1:{jp_http_port}{jp_base_url}api/collaboration/room/{room_id}" ) From 47d71f8b0d03d10e6c6a5c7ad30f6ed0db5397a4 Mon Sep 17 00:00:00 2001 From: David Brochart Date: Fri, 29 Nov 2024 12:17:12 +0100 Subject: [PATCH 3/5] Fix test_fork_handler --- tests/test_handlers.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 199dd64c..d1190db4 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -252,9 +252,8 @@ def _on_root_change(topic: str, event: Any) -> None: file_id = data["fileId"] root_roomid = f"text:file:{file_id}" - async with await rtc_connect_doc_client("text", "file", path) as ws, WebsocketProvider( - root_ydoc.ydoc, ws - ): + websocket, room_name = await rtc_connect_doc_client("text", "file", path) + async with websocket as ws, WebsocketProvider(root_ydoc.ydoc, Websocket(ws, room_name)): await root_connect_event.wait() resp = await rtc_create_fork_client(root_roomid, False, "my fork0", "is awesome0") From 046761394ac1ab523784897a2f11eaeb5ef5227a Mon Sep 17 00:00:00 2001 From: David Brochart Date: Fri, 29 Nov 2024 12:28:36 +0100 Subject: [PATCH 4/5] again --- tests/test_handlers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_handlers.py b/tests/test_handlers.py index d1190db4..05209592 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -317,7 +317,7 @@ def _on_fork_change(topic: str, event: Any) -> None: fork_text = fork_ydoc.ydoc.get("source", type=Text) async with await rtc_connect_fork_client(fork_roomid1) as ws, WebsocketProvider( - fork_ydoc.ydoc, ws + fork_ydoc.ydoc, Websocket(ws, fork_roomid1) ): await fork_connect_event.wait() root_text = root_ydoc.ydoc.get("source", type=Text) From 57fa5f5f43bd8539db663c6076fa2ad9038bfca2 Mon Sep 17 00:00:00 2001 From: David Brochart Date: Fri, 29 Nov 2024 13:08:51 +0100 Subject: [PATCH 5/5] Allow time for update to propagate --- .../jupyter-server-ydoc/jupyter_server_ydoc/pytest_plugin.py | 2 +- tests/test_handlers.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/projects/jupyter-server-ydoc/jupyter_server_ydoc/pytest_plugin.py b/projects/jupyter-server-ydoc/jupyter_server_ydoc/pytest_plugin.py index a916c975..ad86830f 100644 --- a/projects/jupyter-server-ydoc/jupyter_server_ydoc/pytest_plugin.py +++ b/projects/jupyter-server-ydoc/jupyter_server_ydoc/pytest_plugin.py @@ -158,7 +158,7 @@ async def _inner(format: str, type: str, path: str) -> Any: def rtc_connect_fork_client(jp_http_port, jp_base_url, rtc_fetch_session): async def _inner(room_id: str) -> Any: return aconnect_ws( - f"ws://127.0.0.1:{jp_http_port}{jp_base_url}api/collaboration/room/{room_id}" + f"http://127.0.0.1:{jp_http_port}{jp_base_url}api/collaboration/room/{room_id}" ) 
return _inner diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 05209592..6bfdd2f6 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -325,6 +325,7 @@ def _on_fork_change(topic: str, event: Any) -> None: await sleep(0.1) assert str(fork_text) == "Hello, World!" fork_text += " Hi!" + await sleep(0.1) await sleep(0.1) assert str(root_text) == "Hello, World!"
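
Taken together, these patches expose three REST endpoints on the server (PUT, GET and DELETE under /api/collaboration/fork/) plus the ForkManager wrapper on the frontend. Below is a minimal client-side sketch of the REST API only, assuming a locally running Jupyter Server; the endpoint paths, request bodies and response shapes are taken from DocForkHandler above, while BASE_URL, TOKEN and the root room ID are placeholders, not values defined by the patches.

# Minimal sketch of the forking REST API, assuming a local Jupyter Server and an
# existing root room such as "text:file:<file_id>" (as built in test_fork_handler).
# BASE_URL, TOKEN and ROOT_ROOMID are placeholders for illustration only.
import requests

BASE_URL = "http://localhost:8888"        # assumption: local server
TOKEN = "<your-server-token>"             # assumption: placeholder token
ROOT_ROOMID = "text:file:<file_id>"       # assumption: placeholder root room ID

headers = {"Authorization": f"token {TOKEN}"}

# Create a fork of the root document, kept in sync with the root (201 on success).
resp = requests.put(
    f"{BASE_URL}/api/collaboration/fork/{ROOT_ROOMID}",
    headers=headers,
    json={"synchronize": True, "title": "my fork", "description": "scratch copy"},
)
resp.raise_for_status()
fork_roomid = resp.json()["fork_roomid"]

# List all forks of the root document: a mapping of fork room ID to fork info.
forks = requests.get(
    f"{BASE_URL}/api/collaboration/fork/{ROOT_ROOMID}", headers=headers
).json()
print(forks[fork_roomid]["root_roomid"])  # prints ROOT_ROOMID

# Delete the fork, merging its changes back into the root document.
requests.delete(
    f"{BASE_URL}/api/collaboration/fork/{fork_roomid}",
    headers=headers,
    params={"merge": "true"},
).raise_for_status()

These are the same requests that ForkManager.createFork, getAllForks and deleteFork issue through requestAPI on the frontend, as exercised in forkManager.spec.ts.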