From d4e742367cf57f88f25ddc8b57ce8fefc4598c69 Mon Sep 17 00:00:00 2001 From: LongYinan Date: Wed, 25 Dec 2024 12:53:05 +0800 Subject: [PATCH] feat(mobile-native): add nbstore binding --- Cargo.lock | 63 +- Cargo.toml | 66 +- .../server/migrations/migration_lock.toml | 2 +- packages/common/native/Cargo.toml | 6 +- .../ios/App/App.xcodeproj/project.pbxproj | 8 + .../ios/App/App/AffineViewController.swift | 1 + .../App/Plugins/NBStore/NBStorePlugin.swift | 416 +++++ .../App/App/uniffi/affine_mobile_native.swift | 1629 ++++++++++++++++- .../App/App/uniffi/affine_mobile_nativeFFI.h | 329 ++++ .../apps/ios/App/xc-universal-binary.sh | 2 +- packages/frontend/apps/ios/package.json | 4 +- .../apps/ios/src/plugins/nbstore/blob.ts | 33 + .../apps/ios/src/plugins/nbstore/db.ts | 46 + .../ios/src/plugins/nbstore/definitions.ts | 124 ++ .../apps/ios/src/plugins/nbstore/doc.ts | 83 + .../apps/ios/src/plugins/nbstore/handlers.ts | 128 ++ .../apps/ios/src/plugins/nbstore/index.ts | 5 + .../apps/ios/src/plugins/nbstore/plugin.ts | 247 +++ .../apps/ios/src/plugins/nbstore/storage.ts | 83 + .../apps/ios/src/plugins/nbstore/sync.ts | 70 + .../src/modules/workspace-engine/index.ts | 1 + packages/frontend/mobile-native/Cargo.toml | 12 +- packages/frontend/mobile-native/src/error.rs | 19 + packages/frontend/mobile-native/src/lib.rs | 435 +++++ packages/frontend/native/Cargo.toml | 22 +- packages/frontend/native/nbstore/Cargo.toml | 12 +- packages/frontend/native/nbstore/src/blob.rs | 11 +- packages/frontend/native/nbstore/src/doc.rs | 13 +- packages/frontend/native/nbstore/src/lib.rs | 114 +- packages/frontend/native/package.json | 4 +- packages/frontend/native/schema/Cargo.toml | 2 +- packages/frontend/native/sqlite_v1/Cargo.toml | 4 +- tools/commitlint/.commitlintrc.json | 1 + yarn.lock | 2 + 34 files changed, 3849 insertions(+), 148 deletions(-) create mode 100644 packages/frontend/apps/ios/App/App/Plugins/NBStore/NBStorePlugin.swift create mode 100644 packages/frontend/apps/ios/src/plugins/nbstore/blob.ts create mode 100644 packages/frontend/apps/ios/src/plugins/nbstore/db.ts create mode 100644 packages/frontend/apps/ios/src/plugins/nbstore/definitions.ts create mode 100644 packages/frontend/apps/ios/src/plugins/nbstore/doc.ts create mode 100644 packages/frontend/apps/ios/src/plugins/nbstore/handlers.ts create mode 100644 packages/frontend/apps/ios/src/plugins/nbstore/index.ts create mode 100644 packages/frontend/apps/ios/src/plugins/nbstore/plugin.ts create mode 100644 packages/frontend/apps/ios/src/plugins/nbstore/storage.ts create mode 100644 packages/frontend/apps/ios/src/plugins/nbstore/sync.ts create mode 100644 packages/frontend/mobile-native/src/error.rs diff --git a/Cargo.lock b/Cargo.lock index 34776a6aaba85..cf7c9d2a2a511 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -33,6 +33,12 @@ name = "affine_mobile_native" version = "0.0.0" dependencies = [ "affine_common", + "affine_nbstore", + "anyhow", + "base64-simd", + "chrono", + "sqlx", + "thiserror 2.0.8", "uniffi", ] @@ -64,6 +70,7 @@ dependencies = [ "napi-derive", "sqlx", "tokio", + "uniffi", ] [[package]] @@ -304,6 +311,16 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "base64-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" +dependencies = [ + "outref", + "vsimd", +] + [[package]] name = 
"base64ct" version = "1.6.0" @@ -437,7 +454,7 @@ dependencies = [ "semver", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -1287,7 +1304,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -1600,6 +1617,12 @@ dependencies = [ "num-traits", ] +[[package]] +name = "outref" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4030760ffd992bef45b0ae3f10ce1aba99e33464c90d14dd7c039884963ddc7a" + [[package]] name = "overload" version = "0.1.1" @@ -2245,7 +2268,7 @@ dependencies = [ "sha2", "smallvec", "sqlformat", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-stream", "tracing", @@ -2330,7 +2353,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror", + "thiserror 1.0.69", "tracing", "whoami", ] @@ -2369,7 +2392,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror", + "thiserror 1.0.69", "tracing", "whoami", ] @@ -2489,7 +2512,16 @@ version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ - "thiserror-impl", + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f5383f3e0071702bf93ab5ee99b52d26936be9dedd9413067cbdcddcb6141a" +dependencies = [ + "thiserror-impl 2.0.8", ] [[package]] @@ -2503,6 +2535,17 @@ dependencies = [ "syn", ] +[[package]] +name = "thiserror-impl" +version = "2.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f357fcec90b3caef6623a099691be676d033b40a058ac95d2a6ade6fa0c943" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "thread_local" version = "1.1.8" @@ -2905,6 +2948,12 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + [[package]] name = "walkdir" version = "2.5.0" @@ -3303,7 +3352,7 @@ dependencies = [ "serde", "serde_json", "smol_str", - "thiserror", + "thiserror 1.0.69", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 182327176791f..f7bd2ed786bf0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,40 +1,44 @@ [workspace] -members = [ - "./packages/backend/native", - "./packages/common/native", - "./packages/frontend/native", - "./packages/frontend/native/sqlite_v1", - "./packages/frontend/native/nbstore", - "./packages/frontend/native/schema", - "./packages/frontend/mobile-native", +members = [ + "./packages/backend/native", + "./packages/common/native", + "./packages/frontend/mobile-native", + "./packages/frontend/native", + "./packages/frontend/native/nbstore", + "./packages/frontend/native/schema", + "./packages/frontend/native/sqlite_v1", ] resolver = "2" [workspace.dependencies] -affine_common = { path = "./packages/common/native" } -criterion2 = { version = "2", default-features = false } -anyhow = "1" -chrono = "0.4" +affine_common = { path = "./packages/common/native" } +affine_nbstore = { path = "./packages/frontend/native/nbstore" } +anyhow = "1" 
+base64-simd = "0.8" +chrono = "0.4" +criterion2 = { version = "2", default-features = false } dotenvy = "0.15" -file-format = { version = "0.26", features = ["reader"] } -mimalloc = "0.1" -napi = { version = "3.0.0-alpha.12", features = ["async", "chrono_date", "error_anyhow", "napi9", "serde"] } -napi-build = { version = "2" } -napi-derive = { version = "3.0.0-alpha.12" } -notify = { version = "7", features = ["serde"] } -once_cell = "1" -parking_lot = "0.12" -rand = "0.8" -rayon = "1.10" -serde = "1" -serde_json = "1" -sha3 = "0.10" -sqlx = { version = "0.8", default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] } -tiktoken-rs = "0.6" -tokio = "1.37" -uuid = "1.8" -v_htmlescape = "0.15" -y-octo = { git = "https://github.com/y-crdt/y-octo.git", branch = "main" } +file-format = { version = "0.26", features = ["reader"] } +mimalloc = "0.1" +napi = { version = "3.0.0-alpha.12", features = ["async", "chrono_date", "error_anyhow", "napi9", "serde"] } +napi-build = { version = "2" } +napi-derive = { version = "3.0.0-alpha.12" } +notify = { version = "7", features = ["serde"] } +once_cell = "1" +parking_lot = "0.12" +rand = "0.8" +rayon = "1.10" +serde = "1" +serde_json = "1" +sha3 = "0.10" +sqlx = { version = "0.8", default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] } +thiserror = "2" +tiktoken-rs = "0.6" +tokio = "1.37" +uniffi = "0.28" +uuid = "1.8" +v_htmlescape = "0.15" +y-octo = { git = "https://github.com/y-crdt/y-octo.git", branch = "main" } [profile.dev.package.sqlx-macros] opt-level = 3 diff --git a/packages/backend/server/migrations/migration_lock.toml b/packages/backend/server/migrations/migration_lock.toml index fbffa92c2bb7c..99e4f20090794 100644 --- a/packages/backend/server/migrations/migration_lock.toml +++ b/packages/backend/server/migrations/migration_lock.toml @@ -1,3 +1,3 @@ # Please do not edit this file manually # It should be added in your version-control system (i.e. 
Git) -provider = "postgresql" \ No newline at end of file +provider = "postgresql" diff --git a/packages/common/native/Cargo.toml b/packages/common/native/Cargo.toml index 7693d7e486e18..5d1ad3e3eb223 100644 --- a/packages/common/native/Cargo.toml +++ b/packages/common/native/Cargo.toml @@ -9,9 +9,9 @@ rand = { workspace = true } sha3 = { workspace = true } [dev-dependencies] -rayon = { workspace = true } -criterion2 = { workspace = true } +criterion2 = { workspace = true } +rayon = { workspace = true } [[bench]] -name = "hashcash" harness = false +name = "hashcash" diff --git a/packages/frontend/apps/ios/App/App.xcodeproj/project.pbxproj b/packages/frontend/apps/ios/App/App.xcodeproj/project.pbxproj index fe3bec13be596..6d9f0ec1c5f7b 100644 --- a/packages/frontend/apps/ios/App/App.xcodeproj/project.pbxproj +++ b/packages/frontend/apps/ios/App/App.xcodeproj/project.pbxproj @@ -60,6 +60,10 @@ FC68EB0AF532CFC21C3344DD /* Pods-App.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-App.debug.xcconfig"; path = "Pods/Target Support Files/Pods-App/Pods-App.debug.xcconfig"; sourceTree = ""; }; /* End PBXFileReference section */ +/* Begin PBXFileSystemSynchronizedRootGroup section */ + C45499AB2D140B5000E21978 /* NBStore */ = {isa = PBXFileSystemSynchronizedRootGroup; explicitFileTypes = {}; explicitFolders = (); path = NBStore; sourceTree = ""; }; +/* End PBXFileSystemSynchronizedRootGroup section */ + /* Begin PBXFrameworksBuildPhase section */ 504EC3011FED79650016851F /* Frameworks */ = { isa = PBXFrameworksBuildPhase; @@ -139,6 +143,7 @@ 9D90BE1A2CCB9876006677DB /* Plugins */ = { isa = PBXGroup; children = ( + C45499AB2D140B5000E21978 /* NBStore */, E93B276A2CED9298001409B8 /* NavigationGesture */, 9D90BE192CCB9876006677DB /* Cookie */, ); @@ -201,6 +206,9 @@ ); dependencies = ( ); + fileSystemSynchronizedGroups = ( + C45499AB2D140B5000E21978 /* NBStore */, + ); name = App; productName = App; productReference = 504EC3041FED79650016851F /* App.app */; diff --git a/packages/frontend/apps/ios/App/App/AffineViewController.swift b/packages/frontend/apps/ios/App/App/AffineViewController.swift index 3f3e25d7b8cc6..773677f554995 100644 --- a/packages/frontend/apps/ios/App/App/AffineViewController.swift +++ b/packages/frontend/apps/ios/App/App/AffineViewController.swift @@ -21,6 +21,7 @@ class AFFiNEViewController: CAPBridgeViewController { HashcashPlugin(), NavigationGesturePlugin(), IntelligentsPlugin(representController: self), + NbStorePlugin(), ] plugins.forEach { bridge?.registerPluginInstance($0) } } diff --git a/packages/frontend/apps/ios/App/App/Plugins/NBStore/NBStorePlugin.swift b/packages/frontend/apps/ios/App/App/Plugins/NBStore/NBStorePlugin.swift new file mode 100644 index 0000000000000..a47303ad2a5d2 --- /dev/null +++ b/packages/frontend/apps/ios/App/App/Plugins/NBStore/NBStorePlugin.swift @@ -0,0 +1,416 @@ +import Capacitor +import Foundation + +@objc(NbStorePlugin) +public class NbStorePlugin: CAPPlugin, CAPBridgedPlugin { + private var docStorage: DocStorage? 
+ + public let identifier = "NbStorePlugin" + public let jsName = "NbStoreDocStorage" + public let pluginMethods: [CAPPluginMethod] = [ + CAPPluginMethod(name: "getSpaceDBPath", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "create", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "connect", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "close", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "isClosed", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "checkpoint", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "validate", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "setSpaceId", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "pushUpdate", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "getDocSnapshot", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "setDocSnapshot", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "getDocUpdates", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "markUpdatesMerged", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "deleteDoc", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "getDocClocks", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "getDocClock", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "getBlob", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "setBlob", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "deleteBlob", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "releaseBlobs", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "listBlobs", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "getPeerRemoteClocks", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "getPeerRemoteClock", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "setPeerRemoteClock", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "getPeerPulledRemoteClocks", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "getPeerPulledRemoteClock", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "setPeerPulledRemoteClock", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "getPeerPushedClocks", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "setPeerPushedClock", returnType: CAPPluginReturnPromise), + CAPPluginMethod(name: "clearClocks", returnType: CAPPluginReturnPromise), + ] + + private func escapeFilename(_ name: String) -> String { + // Replace special characters with '_' and collapse multiple '_' into one + let escaped = name.replacingOccurrences( + of: "[\\\\!@#$%^&*()+~`\"':;,?<>|]", + with: "_", + options: .regularExpression + ) + return escaped.replacingOccurrences( + of: "_+", + with: "_", + options: .regularExpression + ).trimmingCharacters(in: CharacterSet(charactersIn: "_")) + } + + private func getSpaceDBPath(peer: String, spaceType: String, id: String) -> String? { + guard let documentsPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else { + return nil + } + + let spacesDir = spaceType == "userspace" ? "userspaces" : "workspaces" + let dbPath = documentsPath + .appendingPathComponent(".affine") + .appendingPathComponent(spacesDir) + .appendingPathComponent(escapeFilename(peer)) + .appendingPathComponent(id) + + // Create directories if they don't exist + try? 
FileManager.default.createDirectory(at: dbPath, withIntermediateDirectories: true) + + return dbPath.appendingPathComponent("storage.db").path + } + + @objc func getSpaceDBPath(_ call: CAPPluginCall) { + let peer = call.getString("peer") ?? "" + let spaceType = call.getString("spaceType") ?? "workspace" + let id = call.getString("id") ?? "" + + if let path = getSpaceDBPath(peer: peer, spaceType: spaceType, id: id) { + call.resolve(["path": path]) + } else { + call.reject("Could not access Documents directory") + } + } + + @objc func create(_ call: CAPPluginCall) { + let path = call.getString("path") ?? "" + docStorage = try? DocStorage(path: path) + } + + @objc func connect(_: CAPPluginCall) async { + try? await docStorage?.connect() + } + + @objc func close(_: CAPPluginCall) async { + try? await docStorage?.close() + } + + @objc func isClosed(_ call: CAPPluginCall) { + call.resolve(["isClosed": docStorage?.isClosed() ?? true]) + } + + @objc func checkpoint(_: CAPPluginCall) async { + try? await docStorage?.checkpoint() + } + + @objc func validate(_ call: CAPPluginCall) async { + let validate = (try? await docStorage?.validate()) ?? false + call.resolve(["isValidate": validate]) + } + + @objc func setSpaceId(_ call: CAPPluginCall) async { + let spaceId = call.getString("spaceId") ?? "" + do { + try await docStorage?.setSpaceId(spaceId: spaceId) + call.resolve() + } catch { + call.reject("Failed to set space id", nil, error) + } + } + + @objc func pushUpdate(_ call: CAPPluginCall) async { + let docId = call.getString("docId") ?? "" + let data = call.getString("data") ?? "" + do { + if let timestamp = try await docStorage?.pushUpdate(docId: docId, update: data) { + call.resolve(["timestamp": timestamp.timeIntervalSince1970]) + } else { + call.resolve() + } + } catch { + call.reject("Failed to push update", nil, error) + } + } + + @objc func getDocSnapshot(_ call: CAPPluginCall) async { + let docId = call.getString("docId") ?? "" + do { + if let record = try await docStorage?.getDocSnapshot(docId: docId) { + call.resolve([ + "docId": record.docId, + "data": record.data, + "timestamp": record.timestamp.timeIntervalSince1970, + ]) + } else { + call.resolve() + } + } catch { + call.reject("Failed to get doc snapshot", nil, error) + } + } + + @objc func setDocSnapshot(_ call: CAPPluginCall) async { + let docId = call.getString("docId") ?? "" + let data = call.getString("data") ?? "" + let timestamp = Date() + do { + let success = try await docStorage!.setDocSnapshot( + snapshot: DocRecord(docId: docId, data: data, timestamp: timestamp) + ) + call.resolve(["success": success]) + } catch { + call.reject("Failed to set doc snapshot", nil, error) + } + } + + @objc func getDocUpdates(_ call: CAPPluginCall) async { + let docId = call.getString("docId") ?? "" + do { + let updates = try await docStorage!.getDocUpdates(docId: docId) + let mapped = updates.map { [ + "docId": $0.docId, + "createdAt": $0.createdAt.timeIntervalSince1970, + "data": $0.data, + ] } + call.resolve(["updates": mapped]) + } catch { + call.reject("Failed to get doc updates", nil, error) + } + } + + @objc func markUpdatesMerged(_ call: CAPPluginCall) async { + let docId = call.getString("docId") ?? "" + let times = call.getArray("timestamps", Double.self) ?? 
[] + let dateArray = times.map { Date(timeIntervalSince1970: $0) } + do { + let count = try await docStorage!.markUpdatesMerged(docId: docId, updates: dateArray) + call.resolve(["count": count]) + } catch { + call.reject("Failed to mark updates merged", nil, error) + } + } + + @objc func deleteDoc(_ call: CAPPluginCall) async { + let docId = call.getString("docId") ?? "" + do { + try await docStorage?.deleteDoc(docId: docId) + call.resolve() + } catch { + call.reject("Failed to delete doc", nil, error) + } + } + + @objc func getDocClocks(_ call: CAPPluginCall) async { + do { + let after = call.getInt("after") + let docClocks = try await docStorage!.getDocClocks(after: after != nil ? Date(timeIntervalSince1970: TimeInterval(after!)) : nil) + let mapped = docClocks.map { [ + "docId": $0.docId, + "timestamp": $0.timestamp.timeIntervalSince1970, + ] } + call.resolve(["clocks": mapped]) + } catch { + call.reject("Failed to get doc clocks", nil, error) + } + } + + @objc func getDocClock(_ call: CAPPluginCall) async { + let docId = call.getString("docId") ?? "" + do { + if let docClock = try await docStorage!.getDocClock(docId: docId) { + call.resolve([ + "docId": docClock.docId, + "timestamp": docClock.timestamp.timeIntervalSince1970, + ]) + } else { + call.resolve() + } + } catch { + call.reject("Failed to get doc clock for docId: \(docId)", nil, error) + } + } + + @objc func getBlob(_ call: CAPPluginCall) async { + let key = call.getString("key") ?? "" + if let blob = try? await docStorage!.getBlob(key: key) { + call.resolve(["blob": blob]) + } else { + call.resolve() + } + } + + @objc func setBlob(_ call: CAPPluginCall) async { + let key = call.getString("key") ?? "" + let data = call.getString("data") ?? "" + let mime = call.getString("mime") ?? "" + try? await docStorage?.setBlob(blob: SetBlob(key: key, data: data, mime: mime)) + } + + @objc func deleteBlob(_ call: CAPPluginCall) async { + let key = call.getString("key") ?? "" + let permanently = call.getBool("permanently") ?? false + try? await docStorage?.deleteBlob(key: key, permanently: permanently) + } + + @objc func releaseBlobs(_: CAPPluginCall) async { + try? await docStorage?.releaseBlobs() + } + + @objc func listBlobs(_ call: CAPPluginCall) async { + if let blobs = try? await docStorage?.listBlobs() { + let mapped = blobs.map { [ + "key": $0.key, + "size": $0.size, + "mime": $0.mime, + "createdAt": $0.createdAt.timeIntervalSince1970, + ] } + call.resolve(["blobs": mapped]) + } else { + call.resolve() + } + } + + @objc func getPeerRemoteClocks(_ call: CAPPluginCall) async { + let peer = call.getString("peer") ?? "" + do { + if let clocks = try await docStorage?.getPeerRemoteClocks(peer: peer) { + let mapped = clocks.map { [ + "docId": $0.docId, + "timestamp": $0.timestamp.timeIntervalSince1970, + ] } + call.resolve(["clocks": mapped]) + } else { + call.resolve() + } + } catch { + call.reject("Failed to get peer remote clocks", nil, error) + } + } + + @objc func getPeerRemoteClock(_ call: CAPPluginCall) async { + let peer = call.getString("peer") ?? "" + let docId = call.getString("docId") ?? "" + do { + if let clock = try await docStorage?.getPeerRemoteClock(peer: peer, docId: docId) { + call.resolve([ + "docId": clock.docId, + "timestamp": clock.timestamp.timeIntervalSince1970, + ]) + } else { + call.resolve() + } + } catch { + call.reject("Failed to get peer remote clock", nil, error) + } + } + + @objc func setPeerRemoteClock(_ call: CAPPluginCall) async { + let peer = call.getString("peer") ?? 
"" + let docId = call.getString("docId") ?? "" + let timestamp = call.getDouble("timestamp") ?? 0 + do { + try await docStorage?.setPeerRemoteClock( + peer: peer, + docId: docId, + clock: Date(timeIntervalSince1970: timestamp) + ) + call.resolve() + } catch { + call.reject("Failed to set peer remote clock", nil, error) + } + } + + @objc func getPeerPulledRemoteClocks(_ call: CAPPluginCall) async { + let peer = call.getString("peer") ?? "" + do { + if let clocks = try await docStorage?.getPeerPulledRemoteClocks(peer: peer) { + let mapped = clocks.map { [ + "docId": $0.docId, + "timestamp": $0.timestamp.timeIntervalSince1970, + ] } + call.resolve(["clocks": mapped]) + } else { + call.resolve() + } + } catch { + call.reject("Failed to get peer pulled remote clocks", nil, error) + } + } + + @objc func getPeerPulledRemoteClock(_ call: CAPPluginCall) async { + let peer = call.getString("peer") ?? "" + let docId = call.getString("docId") ?? "" + do { + if let clock = try await docStorage?.getPeerPulledRemoteClock(peer: peer, docId: docId) { + call.resolve([ + "docId": clock.docId, + "timestamp": clock.timestamp.timeIntervalSince1970, + ]) + } else { + call.resolve() + } + } catch { + call.reject("Failed to get peer pulled remote clock", nil, error) + } + } + + @objc func setPeerPulledRemoteClock(_ call: CAPPluginCall) async { + let peer = call.getString("peer") ?? "" + let docId = call.getString("docId") ?? "" + let timestamp = call.getDouble("timestamp") ?? 0 + do { + try await docStorage?.setPeerPulledRemoteClock( + peer: peer, + docId: docId, + clock: Date(timeIntervalSince1970: timestamp) + ) + call.resolve() + } catch { + call.reject("Failed to set peer pulled remote clock", nil, error) + } + } + + @objc func getPeerPushedClocks(_ call: CAPPluginCall) async { + let peer = call.getString("peer") ?? "" + do { + if let clocks = try await docStorage?.getPeerPushedClocks(peer: peer) { + let mapped = clocks.map { [ + "docId": $0.docId, + "timestamp": $0.timestamp.timeIntervalSince1970, + ] } + call.resolve(["clocks": mapped]) + } else { + call.resolve() + } + } catch { + call.reject("Failed to get peer pushed clocks", nil, error) + } + } + + @objc func setPeerPushedClock(_ call: CAPPluginCall) async { + let peer = call.getString("peer") ?? "" + let docId = call.getString("docId") ?? "" + let timestamp = call.getDouble("timestamp") ?? 
0 + do { + try await docStorage?.setPeerPushedClock( + peer: peer, + docId: docId, + clock: Date(timeIntervalSince1970: timestamp) + ) + call.resolve() + } catch { + call.reject("Failed to set peer pushed clock", nil, error) + } + } + + @objc func clearClocks(_ call: CAPPluginCall) async { + do { + try await docStorage?.clearClocks() + call.resolve() + } catch { + call.reject("Failed to clear clocks", nil, error) + } + } +} diff --git a/packages/frontend/apps/ios/App/App/uniffi/affine_mobile_native.swift b/packages/frontend/apps/ios/App/App/uniffi/affine_mobile_native.swift index 30d7930ce7165..828bca1f3686e 100644 --- a/packages/frontend/apps/ios/App/App/uniffi/affine_mobile_native.swift +++ b/packages/frontend/apps/ios/App/App/uniffi/affine_mobile_native.swift @@ -412,6 +412,46 @@ fileprivate struct FfiConverterUInt32: FfiConverterPrimitive { } } +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +fileprivate struct FfiConverterInt64: FfiConverterPrimitive { + typealias FfiType = Int64 + typealias SwiftType = Int64 + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Int64 { + return try lift(readInt(&buf)) + } + + public static func write(_ value: Int64, into buf: inout [UInt8]) { + writeInt(&buf, lower(value)) + } +} + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +fileprivate struct FfiConverterBool : FfiConverter { + typealias FfiType = Int8 + typealias SwiftType = Bool + + public static func lift(_ value: Int8) throws -> Bool { + return value != 0 + } + + public static func lower(_ value: Bool) -> Int8 { + return value ? 1 : 0 + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Bool { + return try lift(readInt(&buf)) + } + + public static func write(_ value: Bool, into buf: inout [UInt8]) { + writeInt(&buf, lower(value)) + } +} + #if swift(>=5.8) @_documentation(visibility: private) #endif @@ -452,31 +492,1582 @@ fileprivate struct FfiConverterString: FfiConverter { writeBytes(&buf, value.utf8) } } -public func hashcashMint(resource: String, bits: UInt32) -> String { - return try! FfiConverterString.lift(try! rustCall() { - uniffi_affine_mobile_native_fn_func_hashcash_mint( - FfiConverterString.lower(resource), - FfiConverterUInt32.lower(bits),$0 + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +fileprivate struct FfiConverterTimestamp: FfiConverterRustBuffer { + typealias SwiftType = Date + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Date { + let seconds: Int64 = try readInt(&buf) + let nanoseconds: UInt32 = try readInt(&buf) + if seconds >= 0 { + let delta = Double(seconds) + (Double(nanoseconds) / 1.0e9) + return Date.init(timeIntervalSince1970: delta) + } else { + let delta = Double(seconds) - (Double(nanoseconds) / 1.0e9) + return Date.init(timeIntervalSince1970: delta) + } + } + + public static func write(_ value: Date, into buf: inout [UInt8]) { + var delta = value.timeIntervalSince1970 + var sign: Int64 = 1 + if delta < 0 { + // The nanoseconds portion of the epoch offset must always be + // positive, to simplify the calculation we will use the absolute + // value of the offset. 
+ sign = -1 + delta = -delta + } + if delta.rounded(.down) > Double(Int64.max) { + fatalError("Timestamp overflow, exceeds max bounds supported by Uniffi") + } + let seconds = Int64(delta) + let nanoseconds = UInt32((delta - Double(seconds)) * 1.0e9) + writeInt(&buf, sign * seconds) + writeInt(&buf, nanoseconds) + } +} + + + + +public protocol DocStorageProtocol : AnyObject { + + func checkpoint() async throws + + func clearClocks() async throws + + func close() async throws + + /** + * Initialize the database and run migrations. + */ + func connect() async throws + + func deleteBlob(key: String, permanently: Bool) async throws + + func deleteDoc(docId: String) async throws + + func getBlob(key: String) async throws -> Blob? + + func getDocClock(docId: String) async throws -> DocClock? + + func getDocClocks(after: Date?) async throws -> [DocClock] + + func getDocSnapshot(docId: String) async throws -> DocRecord? + + func getDocUpdates(docId: String) async throws -> [DocUpdate] + + func getPeerPulledRemoteClock(peer: String, docId: String) async throws -> DocClock + + func getPeerPulledRemoteClocks(peer: String) async throws -> [DocClock] + + func getPeerPushedClocks(peer: String) async throws -> [DocClock] + + func getPeerRemoteClock(peer: String, docId: String) async throws -> DocClock + + func getPeerRemoteClocks(peer: String) async throws -> [DocClock] + + func isClosed() -> Bool + + func listBlobs() async throws -> [ListedBlob] + + func markUpdatesMerged(docId: String, updates: [Date]) async throws -> UInt32 + + func pushUpdate(docId: String, update: String) async throws -> Date + + func releaseBlobs() async throws + + func setBlob(blob: SetBlob) async throws + + func setDocSnapshot(snapshot: DocRecord) async throws -> Bool + + func setPeerPulledRemoteClock(peer: String, docId: String, clock: Date) async throws + + func setPeerPushedClock(peer: String, docId: String, clock: Date) async throws + + func setPeerRemoteClock(peer: String, docId: String, clock: Date) async throws + + func setSpaceId(spaceId: String) async throws + + func validate() async throws -> Bool + +} + +open class DocStorage: + DocStorageProtocol { + fileprivate let pointer: UnsafeMutableRawPointer! + + /// Used to instantiate a [FFIObject] without an actual pointer, for fakes in tests, mostly. +#if swift(>=5.8) + @_documentation(visibility: private) +#endif + public struct NoPointer { + public init() {} + } + + // TODO: We'd like this to be `private` but for Swifty reasons, + // we can't implement `FfiConverter` without making this `required` and we can't + // make it `required` without making it `public`. + required public init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { + self.pointer = pointer + } + + // This constructor can be used to instantiate a fake object. + // - Parameter noPointer: Placeholder value so we can have a constructor separate from the default empty one that may be implemented for classes extending [FFIObject]. + // + // - Warning: + // Any object instantiated with this constructor cannot be passed to an actual Rust-backed object. Since there isn't a backing [Pointer] the FFI lower functions will crash. +#if swift(>=5.8) + @_documentation(visibility: private) +#endif + public init(noPointer: NoPointer) { + self.pointer = nil + } + +#if swift(>=5.8) + @_documentation(visibility: private) +#endif + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! 
rustCall { uniffi_affine_mobile_native_fn_clone_docstorage(self.pointer, $0) } + } +public convenience init(path: String)throws { + let pointer = + try rustCallWithError(FfiConverterTypeUniffiError.lift) { + uniffi_affine_mobile_native_fn_constructor_docstorage_new( + FfiConverterString.lower(path),$0 + ) +} + self.init(unsafeFromRawPointer: pointer) +} + + deinit { + guard let pointer = pointer else { + return + } + + try! rustCall { uniffi_affine_mobile_native_fn_free_docstorage(pointer, $0) } + } + + + + +open func checkpoint()async throws { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_checkpoint( + self.uniffiClonePointer() + + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_void, + completeFunc: ffi_affine_mobile_native_rust_future_complete_void, + freeFunc: ffi_affine_mobile_native_rust_future_free_void, + liftFunc: { $0 }, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func clearClocks()async throws { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_clear_clocks( + self.uniffiClonePointer() + + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_void, + completeFunc: ffi_affine_mobile_native_rust_future_complete_void, + freeFunc: ffi_affine_mobile_native_rust_future_free_void, + liftFunc: { $0 }, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func close()async throws { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_close( + self.uniffiClonePointer() + + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_void, + completeFunc: ffi_affine_mobile_native_rust_future_complete_void, + freeFunc: ffi_affine_mobile_native_rust_future_free_void, + liftFunc: { $0 }, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + + /** + * Initialize the database and run migrations. 
+ */ +open func connect()async throws { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_connect( + self.uniffiClonePointer() + + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_void, + completeFunc: ffi_affine_mobile_native_rust_future_complete_void, + freeFunc: ffi_affine_mobile_native_rust_future_free_void, + liftFunc: { $0 }, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func deleteBlob(key: String, permanently: Bool)async throws { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_delete_blob( + self.uniffiClonePointer(), + FfiConverterString.lower(key),FfiConverterBool.lower(permanently) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_void, + completeFunc: ffi_affine_mobile_native_rust_future_complete_void, + freeFunc: ffi_affine_mobile_native_rust_future_free_void, + liftFunc: { $0 }, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func deleteDoc(docId: String)async throws { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_delete_doc( + self.uniffiClonePointer(), + FfiConverterString.lower(docId) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_void, + completeFunc: ffi_affine_mobile_native_rust_future_complete_void, + freeFunc: ffi_affine_mobile_native_rust_future_free_void, + liftFunc: { $0 }, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func getBlob(key: String)async throws -> Blob? { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_get_blob( + self.uniffiClonePointer(), + FfiConverterString.lower(key) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_rust_buffer, + completeFunc: ffi_affine_mobile_native_rust_future_complete_rust_buffer, + freeFunc: ffi_affine_mobile_native_rust_future_free_rust_buffer, + liftFunc: FfiConverterOptionTypeBlob.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func getDocClock(docId: String)async throws -> DocClock? { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_get_doc_clock( + self.uniffiClonePointer(), + FfiConverterString.lower(docId) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_rust_buffer, + completeFunc: ffi_affine_mobile_native_rust_future_complete_rust_buffer, + freeFunc: ffi_affine_mobile_native_rust_future_free_rust_buffer, + liftFunc: FfiConverterOptionTypeDocClock.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func getDocClocks(after: Date?)async throws -> [DocClock] { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_get_doc_clocks( + self.uniffiClonePointer(), + FfiConverterOptionTimestamp.lower(after) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_rust_buffer, + completeFunc: ffi_affine_mobile_native_rust_future_complete_rust_buffer, + freeFunc: ffi_affine_mobile_native_rust_future_free_rust_buffer, + liftFunc: FfiConverterSequenceTypeDocClock.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func getDocSnapshot(docId: String)async throws -> DocRecord? 
{ + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_get_doc_snapshot( + self.uniffiClonePointer(), + FfiConverterString.lower(docId) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_rust_buffer, + completeFunc: ffi_affine_mobile_native_rust_future_complete_rust_buffer, + freeFunc: ffi_affine_mobile_native_rust_future_free_rust_buffer, + liftFunc: FfiConverterOptionTypeDocRecord.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func getDocUpdates(docId: String)async throws -> [DocUpdate] { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_get_doc_updates( + self.uniffiClonePointer(), + FfiConverterString.lower(docId) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_rust_buffer, + completeFunc: ffi_affine_mobile_native_rust_future_complete_rust_buffer, + freeFunc: ffi_affine_mobile_native_rust_future_free_rust_buffer, + liftFunc: FfiConverterSequenceTypeDocUpdate.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func getPeerPulledRemoteClock(peer: String, docId: String)async throws -> DocClock { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_get_peer_pulled_remote_clock( + self.uniffiClonePointer(), + FfiConverterString.lower(peer),FfiConverterString.lower(docId) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_rust_buffer, + completeFunc: ffi_affine_mobile_native_rust_future_complete_rust_buffer, + freeFunc: ffi_affine_mobile_native_rust_future_free_rust_buffer, + liftFunc: FfiConverterTypeDocClock.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func getPeerPulledRemoteClocks(peer: String)async throws -> [DocClock] { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_get_peer_pulled_remote_clocks( + self.uniffiClonePointer(), + FfiConverterString.lower(peer) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_rust_buffer, + completeFunc: ffi_affine_mobile_native_rust_future_complete_rust_buffer, + freeFunc: ffi_affine_mobile_native_rust_future_free_rust_buffer, + liftFunc: FfiConverterSequenceTypeDocClock.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func getPeerPushedClocks(peer: String)async throws -> [DocClock] { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_get_peer_pushed_clocks( + self.uniffiClonePointer(), + FfiConverterString.lower(peer) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_rust_buffer, + completeFunc: ffi_affine_mobile_native_rust_future_complete_rust_buffer, + freeFunc: ffi_affine_mobile_native_rust_future_free_rust_buffer, + liftFunc: FfiConverterSequenceTypeDocClock.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func getPeerRemoteClock(peer: String, docId: String)async throws -> DocClock { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_get_peer_remote_clock( + self.uniffiClonePointer(), + FfiConverterString.lower(peer),FfiConverterString.lower(docId) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_rust_buffer, + completeFunc: ffi_affine_mobile_native_rust_future_complete_rust_buffer, + freeFunc: ffi_affine_mobile_native_rust_future_free_rust_buffer, + liftFunc: FfiConverterTypeDocClock.lift, + 
errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func getPeerRemoteClocks(peer: String)async throws -> [DocClock] { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_get_peer_remote_clocks( + self.uniffiClonePointer(), + FfiConverterString.lower(peer) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_rust_buffer, + completeFunc: ffi_affine_mobile_native_rust_future_complete_rust_buffer, + freeFunc: ffi_affine_mobile_native_rust_future_free_rust_buffer, + liftFunc: FfiConverterSequenceTypeDocClock.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func isClosed() -> Bool { + return try! FfiConverterBool.lift(try! rustCall() { + uniffi_affine_mobile_native_fn_method_docstorage_is_closed(self.uniffiClonePointer(),$0 ) }) } + +open func listBlobs()async throws -> [ListedBlob] { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_list_blobs( + self.uniffiClonePointer() + + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_rust_buffer, + completeFunc: ffi_affine_mobile_native_rust_future_complete_rust_buffer, + freeFunc: ffi_affine_mobile_native_rust_future_free_rust_buffer, + liftFunc: FfiConverterSequenceTypeListedBlob.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func markUpdatesMerged(docId: String, updates: [Date])async throws -> UInt32 { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_mark_updates_merged( + self.uniffiClonePointer(), + FfiConverterString.lower(docId),FfiConverterSequenceTimestamp.lower(updates) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_u32, + completeFunc: ffi_affine_mobile_native_rust_future_complete_u32, + freeFunc: ffi_affine_mobile_native_rust_future_free_u32, + liftFunc: FfiConverterUInt32.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func pushUpdate(docId: String, update: String)async throws -> Date { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_push_update( + self.uniffiClonePointer(), + FfiConverterString.lower(docId),FfiConverterString.lower(update) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_rust_buffer, + completeFunc: ffi_affine_mobile_native_rust_future_complete_rust_buffer, + freeFunc: ffi_affine_mobile_native_rust_future_free_rust_buffer, + liftFunc: FfiConverterTimestamp.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func releaseBlobs()async throws { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_release_blobs( + self.uniffiClonePointer() + + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_void, + completeFunc: ffi_affine_mobile_native_rust_future_complete_void, + freeFunc: ffi_affine_mobile_native_rust_future_free_void, + liftFunc: { $0 }, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func setBlob(blob: SetBlob)async throws { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_set_blob( + self.uniffiClonePointer(), + FfiConverterTypeSetBlob.lower(blob) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_void, + completeFunc: ffi_affine_mobile_native_rust_future_complete_void, + freeFunc: ffi_affine_mobile_native_rust_future_free_void, + liftFunc: { $0 }, + errorHandler: 
FfiConverterTypeUniffiError.lift + ) +} + +open func setDocSnapshot(snapshot: DocRecord)async throws -> Bool { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_set_doc_snapshot( + self.uniffiClonePointer(), + FfiConverterTypeDocRecord.lower(snapshot) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_i8, + completeFunc: ffi_affine_mobile_native_rust_future_complete_i8, + freeFunc: ffi_affine_mobile_native_rust_future_free_i8, + liftFunc: FfiConverterBool.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func setPeerPulledRemoteClock(peer: String, docId: String, clock: Date)async throws { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_set_peer_pulled_remote_clock( + self.uniffiClonePointer(), + FfiConverterString.lower(peer),FfiConverterString.lower(docId),FfiConverterTimestamp.lower(clock) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_void, + completeFunc: ffi_affine_mobile_native_rust_future_complete_void, + freeFunc: ffi_affine_mobile_native_rust_future_free_void, + liftFunc: { $0 }, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func setPeerPushedClock(peer: String, docId: String, clock: Date)async throws { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_set_peer_pushed_clock( + self.uniffiClonePointer(), + FfiConverterString.lower(peer),FfiConverterString.lower(docId),FfiConverterTimestamp.lower(clock) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_void, + completeFunc: ffi_affine_mobile_native_rust_future_complete_void, + freeFunc: ffi_affine_mobile_native_rust_future_free_void, + liftFunc: { $0 }, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func setPeerRemoteClock(peer: String, docId: String, clock: Date)async throws { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_set_peer_remote_clock( + self.uniffiClonePointer(), + FfiConverterString.lower(peer),FfiConverterString.lower(docId),FfiConverterTimestamp.lower(clock) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_void, + completeFunc: ffi_affine_mobile_native_rust_future_complete_void, + freeFunc: ffi_affine_mobile_native_rust_future_free_void, + liftFunc: { $0 }, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func setSpaceId(spaceId: String)async throws { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_set_space_id( + self.uniffiClonePointer(), + FfiConverterString.lower(spaceId) + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_void, + completeFunc: ffi_affine_mobile_native_rust_future_complete_void, + freeFunc: ffi_affine_mobile_native_rust_future_free_void, + liftFunc: { $0 }, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + +open func validate()async throws -> Bool { + return + try await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_affine_mobile_native_fn_method_docstorage_validate( + self.uniffiClonePointer() + + ) + }, + pollFunc: ffi_affine_mobile_native_rust_future_poll_i8, + completeFunc: ffi_affine_mobile_native_rust_future_complete_i8, + freeFunc: ffi_affine_mobile_native_rust_future_free_i8, + liftFunc: FfiConverterBool.lift, + errorHandler: FfiConverterTypeUniffiError.lift + ) +} + -private enum InitializationResult { - case ok - case 
contractVersionMismatch - case apiChecksumMismatch } -// Use a global variable to perform the versioning checks. Swift ensures that -// the code inside is only computed once. -private var initializationResult: InitializationResult = { - // Get the bindings contract version from our ComponentInterface - let bindings_contract_version = 26 - // Get the scaffolding contract version by calling the into the dylib - let scaffolding_contract_version = ffi_affine_mobile_native_uniffi_contract_version() - if bindings_contract_version != scaffolding_contract_version { - return InitializationResult.contractVersionMismatch + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public struct FfiConverterTypeDocStorage: FfiConverter { + + typealias FfiType = UnsafeMutableRawPointer + typealias SwiftType = DocStorage + + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> DocStorage { + return DocStorage(unsafeFromRawPointer: pointer) } - if (uniffi_affine_mobile_native_checksum_func_hashcash_mint() != 23633) { + + public static func lower(_ value: DocStorage) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> DocStorage { + let v: UInt64 = try readInt(&buf) + // The Rust code won't compile if a pointer won't fit in a UInt64. + // We have to go via `UInt` because that's the thing that's the size of a pointer. + let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) + if (ptr == nil) { + throw UniffiInternalError.unexpectedNullPointer + } + return try lift(ptr!) + } + + public static func write(_ value: DocStorage, into buf: inout [UInt8]) { + // This fiddling is because `Int` is the thing that's the same size as a pointer. + // The Rust code won't compile if a pointer won't fit in a `UInt64`. + writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) + } +} + + + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeDocStorage_lift(_ pointer: UnsafeMutableRawPointer) throws -> DocStorage { + return try FfiConverterTypeDocStorage.lift(pointer) +} + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeDocStorage_lower(_ value: DocStorage) -> UnsafeMutableRawPointer { + return FfiConverterTypeDocStorage.lower(value) +} + + +public struct Blob { + public var key: String + public var data: String + public var mime: String + public var size: Int64 + public var createdAt: Date + + // Default memberwise initializers are never public by default, so we + // declare one manually. 
+ public init(key: String, data: String, mime: String, size: Int64, createdAt: Date) { + self.key = key + self.data = data + self.mime = mime + self.size = size + self.createdAt = createdAt + } +} + + + +extension Blob: Equatable, Hashable { + public static func ==(lhs: Blob, rhs: Blob) -> Bool { + if lhs.key != rhs.key { + return false + } + if lhs.data != rhs.data { + return false + } + if lhs.mime != rhs.mime { + return false + } + if lhs.size != rhs.size { + return false + } + if lhs.createdAt != rhs.createdAt { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(key) + hasher.combine(data) + hasher.combine(mime) + hasher.combine(size) + hasher.combine(createdAt) + } +} + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public struct FfiConverterTypeBlob: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Blob { + return + try Blob( + key: FfiConverterString.read(from: &buf), + data: FfiConverterString.read(from: &buf), + mime: FfiConverterString.read(from: &buf), + size: FfiConverterInt64.read(from: &buf), + createdAt: FfiConverterTimestamp.read(from: &buf) + ) + } + + public static func write(_ value: Blob, into buf: inout [UInt8]) { + FfiConverterString.write(value.key, into: &buf) + FfiConverterString.write(value.data, into: &buf) + FfiConverterString.write(value.mime, into: &buf) + FfiConverterInt64.write(value.size, into: &buf) + FfiConverterTimestamp.write(value.createdAt, into: &buf) + } +} + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeBlob_lift(_ buf: RustBuffer) throws -> Blob { + return try FfiConverterTypeBlob.lift(buf) +} + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeBlob_lower(_ value: Blob) -> RustBuffer { + return FfiConverterTypeBlob.lower(value) +} + + +public struct DocClock { + public var docId: String + public var timestamp: Date + + // Default memberwise initializers are never public by default, so we + // declare one manually. 
+ public init(docId: String, timestamp: Date) { + self.docId = docId + self.timestamp = timestamp + } +} + + + +extension DocClock: Equatable, Hashable { + public static func ==(lhs: DocClock, rhs: DocClock) -> Bool { + if lhs.docId != rhs.docId { + return false + } + if lhs.timestamp != rhs.timestamp { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(docId) + hasher.combine(timestamp) + } +} + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public struct FfiConverterTypeDocClock: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> DocClock { + return + try DocClock( + docId: FfiConverterString.read(from: &buf), + timestamp: FfiConverterTimestamp.read(from: &buf) + ) + } + + public static func write(_ value: DocClock, into buf: inout [UInt8]) { + FfiConverterString.write(value.docId, into: &buf) + FfiConverterTimestamp.write(value.timestamp, into: &buf) + } +} + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeDocClock_lift(_ buf: RustBuffer) throws -> DocClock { + return try FfiConverterTypeDocClock.lift(buf) +} + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeDocClock_lower(_ value: DocClock) -> RustBuffer { + return FfiConverterTypeDocClock.lower(value) +} + + +public struct DocRecord { + public var docId: String + public var data: String + public var timestamp: Date + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init(docId: String, data: String, timestamp: Date) { + self.docId = docId + self.data = data + self.timestamp = timestamp + } +} + + + +extension DocRecord: Equatable, Hashable { + public static func ==(lhs: DocRecord, rhs: DocRecord) -> Bool { + if lhs.docId != rhs.docId { + return false + } + if lhs.data != rhs.data { + return false + } + if lhs.timestamp != rhs.timestamp { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(docId) + hasher.combine(data) + hasher.combine(timestamp) + } +} + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public struct FfiConverterTypeDocRecord: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> DocRecord { + return + try DocRecord( + docId: FfiConverterString.read(from: &buf), + data: FfiConverterString.read(from: &buf), + timestamp: FfiConverterTimestamp.read(from: &buf) + ) + } + + public static func write(_ value: DocRecord, into buf: inout [UInt8]) { + FfiConverterString.write(value.docId, into: &buf) + FfiConverterString.write(value.data, into: &buf) + FfiConverterTimestamp.write(value.timestamp, into: &buf) + } +} + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeDocRecord_lift(_ buf: RustBuffer) throws -> DocRecord { + return try FfiConverterTypeDocRecord.lift(buf) +} + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeDocRecord_lower(_ value: DocRecord) -> RustBuffer { + return FfiConverterTypeDocRecord.lower(value) +} + + +public struct DocUpdate { + public var docId: String + public var createdAt: Date + public var data: String + + // Default memberwise initializers are never public by default, so we + // declare one manually. 
+ public init(docId: String, createdAt: Date, data: String) { + self.docId = docId + self.createdAt = createdAt + self.data = data + } +} + + + +extension DocUpdate: Equatable, Hashable { + public static func ==(lhs: DocUpdate, rhs: DocUpdate) -> Bool { + if lhs.docId != rhs.docId { + return false + } + if lhs.createdAt != rhs.createdAt { + return false + } + if lhs.data != rhs.data { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(docId) + hasher.combine(createdAt) + hasher.combine(data) + } +} + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public struct FfiConverterTypeDocUpdate: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> DocUpdate { + return + try DocUpdate( + docId: FfiConverterString.read(from: &buf), + createdAt: FfiConverterTimestamp.read(from: &buf), + data: FfiConverterString.read(from: &buf) + ) + } + + public static func write(_ value: DocUpdate, into buf: inout [UInt8]) { + FfiConverterString.write(value.docId, into: &buf) + FfiConverterTimestamp.write(value.createdAt, into: &buf) + FfiConverterString.write(value.data, into: &buf) + } +} + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeDocUpdate_lift(_ buf: RustBuffer) throws -> DocUpdate { + return try FfiConverterTypeDocUpdate.lift(buf) +} + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeDocUpdate_lower(_ value: DocUpdate) -> RustBuffer { + return FfiConverterTypeDocUpdate.lower(value) +} + + +public struct ListedBlob { + public var key: String + public var size: Int64 + public var mime: String + public var createdAt: Date + + // Default memberwise initializers are never public by default, so we + // declare one manually. 
+ public init(key: String, size: Int64, mime: String, createdAt: Date) { + self.key = key + self.size = size + self.mime = mime + self.createdAt = createdAt + } +} + + + +extension ListedBlob: Equatable, Hashable { + public static func ==(lhs: ListedBlob, rhs: ListedBlob) -> Bool { + if lhs.key != rhs.key { + return false + } + if lhs.size != rhs.size { + return false + } + if lhs.mime != rhs.mime { + return false + } + if lhs.createdAt != rhs.createdAt { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(key) + hasher.combine(size) + hasher.combine(mime) + hasher.combine(createdAt) + } +} + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public struct FfiConverterTypeListedBlob: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> ListedBlob { + return + try ListedBlob( + key: FfiConverterString.read(from: &buf), + size: FfiConverterInt64.read(from: &buf), + mime: FfiConverterString.read(from: &buf), + createdAt: FfiConverterTimestamp.read(from: &buf) + ) + } + + public static func write(_ value: ListedBlob, into buf: inout [UInt8]) { + FfiConverterString.write(value.key, into: &buf) + FfiConverterInt64.write(value.size, into: &buf) + FfiConverterString.write(value.mime, into: &buf) + FfiConverterTimestamp.write(value.createdAt, into: &buf) + } +} + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeListedBlob_lift(_ buf: RustBuffer) throws -> ListedBlob { + return try FfiConverterTypeListedBlob.lift(buf) +} + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeListedBlob_lower(_ value: ListedBlob) -> RustBuffer { + return FfiConverterTypeListedBlob.lower(value) +} + + +public struct SetBlob { + public var key: String + public var data: String + public var mime: String + + // Default memberwise initializers are never public by default, so we + // declare one manually. 
+ public init(key: String, data: String, mime: String) { + self.key = key + self.data = data + self.mime = mime + } +} + + + +extension SetBlob: Equatable, Hashable { + public static func ==(lhs: SetBlob, rhs: SetBlob) -> Bool { + if lhs.key != rhs.key { + return false + } + if lhs.data != rhs.data { + return false + } + if lhs.mime != rhs.mime { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(key) + hasher.combine(data) + hasher.combine(mime) + } +} + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public struct FfiConverterTypeSetBlob: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SetBlob { + return + try SetBlob( + key: FfiConverterString.read(from: &buf), + data: FfiConverterString.read(from: &buf), + mime: FfiConverterString.read(from: &buf) + ) + } + + public static func write(_ value: SetBlob, into buf: inout [UInt8]) { + FfiConverterString.write(value.key, into: &buf) + FfiConverterString.write(value.data, into: &buf) + FfiConverterString.write(value.mime, into: &buf) + } +} + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeSetBlob_lift(_ buf: RustBuffer) throws -> SetBlob { + return try FfiConverterTypeSetBlob.lift(buf) +} + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public func FfiConverterTypeSetBlob_lower(_ value: SetBlob) -> RustBuffer { + return FfiConverterTypeSetBlob.lower(value) +} + + +public enum UniffiError { + + + + case EmptyDocStoragePath + case EmptySpaceId + case SqlxError(String + ) + case Base64DecodingError(String + ) +} + + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +public struct FfiConverterTypeUniffiError: FfiConverterRustBuffer { + typealias SwiftType = UniffiError + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> UniffiError { + let variant: Int32 = try readInt(&buf) + switch variant { + + + + + case 1: return .EmptyDocStoragePath + case 2: return .EmptySpaceId + case 3: return .SqlxError( + try FfiConverterString.read(from: &buf) + ) + case 4: return .Base64DecodingError( + try FfiConverterString.read(from: &buf) + ) + + default: throw UniffiInternalError.unexpectedEnumCase + } + } + + public static func write(_ value: UniffiError, into buf: inout [UInt8]) { + switch value { + + + + + + case .EmptyDocStoragePath: + writeInt(&buf, Int32(1)) + + + case .EmptySpaceId: + writeInt(&buf, Int32(2)) + + + case let .SqlxError(v1): + writeInt(&buf, Int32(3)) + FfiConverterString.write(v1, into: &buf) + + + case let .Base64DecodingError(v1): + writeInt(&buf, Int32(4)) + FfiConverterString.write(v1, into: &buf) + + } + } +} + + +extension UniffiError: Equatable, Hashable {} + +extension UniffiError: Foundation.LocalizedError { + public var errorDescription: String? { + String(reflecting: self) + } +} + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +fileprivate struct FfiConverterOptionTimestamp: FfiConverterRustBuffer { + typealias SwiftType = Date? 
+ + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTimestamp.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTimestamp.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +fileprivate struct FfiConverterOptionTypeBlob: FfiConverterRustBuffer { + typealias SwiftType = Blob? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypeBlob.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypeBlob.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +fileprivate struct FfiConverterOptionTypeDocClock: FfiConverterRustBuffer { + typealias SwiftType = DocClock? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypeDocClock.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypeDocClock.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + +#if swift(>=5.8) +@_documentation(visibility: private) +#endif +fileprivate struct FfiConverterOptionTypeDocRecord: FfiConverterRustBuffer { + typealias SwiftType = DocRecord? 
+
+    public static func write(_ value: SwiftType, into buf: inout [UInt8]) {
+        guard let value = value else {
+            writeInt(&buf, Int8(0))
+            return
+        }
+        writeInt(&buf, Int8(1))
+        FfiConverterTypeDocRecord.write(value, into: &buf)
+    }
+
+    public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType {
+        switch try readInt(&buf) as Int8 {
+        case 0: return nil
+        case 1: return try FfiConverterTypeDocRecord.read(from: &buf)
+        default: throw UniffiInternalError.unexpectedOptionalTag
+        }
+    }
+}
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+fileprivate struct FfiConverterSequenceTimestamp: FfiConverterRustBuffer {
+    typealias SwiftType = [Date]
+
+    public static func write(_ value: [Date], into buf: inout [UInt8]) {
+        let len = Int32(value.count)
+        writeInt(&buf, len)
+        for item in value {
+            FfiConverterTimestamp.write(item, into: &buf)
+        }
+    }
+
+    public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [Date] {
+        let len: Int32 = try readInt(&buf)
+        var seq = [Date]()
+        seq.reserveCapacity(Int(len))
+        for _ in 0 ..< len {
+            seq.append(try FfiConverterTimestamp.read(from: &buf))
+        }
+        return seq
+    }
+}
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+fileprivate struct FfiConverterSequenceTypeDocClock: FfiConverterRustBuffer {
+    typealias SwiftType = [DocClock]
+
+    public static func write(_ value: [DocClock], into buf: inout [UInt8]) {
+        let len = Int32(value.count)
+        writeInt(&buf, len)
+        for item in value {
+            FfiConverterTypeDocClock.write(item, into: &buf)
+        }
+    }
+
+    public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [DocClock] {
+        let len: Int32 = try readInt(&buf)
+        var seq = [DocClock]()
+        seq.reserveCapacity(Int(len))
+        for _ in 0 ..< len {
+            seq.append(try FfiConverterTypeDocClock.read(from: &buf))
+        }
+        return seq
+    }
+}
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+fileprivate struct FfiConverterSequenceTypeDocUpdate: FfiConverterRustBuffer {
+    typealias SwiftType = [DocUpdate]
+
+    public static func write(_ value: [DocUpdate], into buf: inout [UInt8]) {
+        let len = Int32(value.count)
+        writeInt(&buf, len)
+        for item in value {
+            FfiConverterTypeDocUpdate.write(item, into: &buf)
+        }
+    }
+
+    public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [DocUpdate] {
+        let len: Int32 = try readInt(&buf)
+        var seq = [DocUpdate]()
+        seq.reserveCapacity(Int(len))
+        for _ in 0 ..< len {
+            seq.append(try FfiConverterTypeDocUpdate.read(from: &buf))
+        }
+        return seq
+    }
+}
+
+#if swift(>=5.8)
+@_documentation(visibility: private)
+#endif
+fileprivate struct FfiConverterSequenceTypeListedBlob: FfiConverterRustBuffer {
+    typealias SwiftType = [ListedBlob]
+
+    public static func write(_ value: [ListedBlob], into buf: inout [UInt8]) {
+        let len = Int32(value.count)
+        writeInt(&buf, len)
+        for item in value {
+            FfiConverterTypeListedBlob.write(item, into: &buf)
+        }
+    }
+
+    public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [ListedBlob] {
+        let len: Int32 = try readInt(&buf)
+        var seq = [ListedBlob]()
+        seq.reserveCapacity(Int(len))
+        for _ in 0 ..< len {
+            seq.append(try FfiConverterTypeListedBlob.read(from: &buf))
+        }
+        return seq
+    }
+}
+private let UNIFFI_RUST_FUTURE_POLL_READY: Int8 = 0
+private let UNIFFI_RUST_FUTURE_POLL_MAYBE_READY: Int8 = 1
+
+fileprivate let uniffiContinuationHandleMap = UniffiHandleMap<UnsafeContinuation<Int8, Never>>()
+
+fileprivate func uniffiRustCallAsync<F, T>(
+    rustFutureFunc: () -> UInt64,
+    pollFunc: (UInt64, @escaping UniffiRustFutureContinuationCallback, UInt64) -> (),
+    completeFunc: (UInt64, UnsafeMutablePointer<RustCallStatus>) -> F,
+    freeFunc: (UInt64) -> (),
+    liftFunc: (F) throws -> T,
+    errorHandler: ((RustBuffer) throws -> Swift.Error)?
+) async throws -> T {
+    // Make sure to call uniffiEnsureInitialized() since future creation doesn't have a
+    // RustCallStatus param, so doesn't use makeRustCall()
+    uniffiEnsureInitialized()
+    let rustFuture = rustFutureFunc()
+    defer {
+        freeFunc(rustFuture)
+    }
+    var pollResult: Int8;
+    repeat {
+        pollResult = await withUnsafeContinuation {
+            pollFunc(
+                rustFuture,
+                uniffiFutureContinuationCallback,
+                uniffiContinuationHandleMap.insert(obj: $0)
+            )
+        }
+    } while pollResult != UNIFFI_RUST_FUTURE_POLL_READY
+
+    return try liftFunc(makeRustCall(
+        { completeFunc(rustFuture, $0) },
+        errorHandler: errorHandler
+    ))
+}
+
+// Callback handlers for async calls. These are invoked by Rust when the future is ready. They
+// lift the return value or error and resume the suspended function.
+fileprivate func uniffiFutureContinuationCallback(handle: UInt64, pollResult: Int8) {
+    if let continuation = try? uniffiContinuationHandleMap.remove(handle: handle) {
+        continuation.resume(returning: pollResult)
+    } else {
+        print("uniffiFutureContinuationCallback invalid handle")
+    }
+}
+public func hashcashMint(resource: String, bits: UInt32) -> String {
+    return try! FfiConverterString.lift(try! rustCall() {
+    uniffi_affine_mobile_native_fn_func_hashcash_mint(
+        FfiConverterString.lower(resource),
+        FfiConverterUInt32.lower(bits),$0
+    )
+})
+}
+
+private enum InitializationResult {
+    case ok
+    case contractVersionMismatch
+    case apiChecksumMismatch
+}
+// Use a global variable to perform the versioning checks. Swift ensures that
+// the code inside is only computed once.
+private var initializationResult: InitializationResult = { + // Get the bindings contract version from our ComponentInterface + let bindings_contract_version = 26 + // Get the scaffolding contract version by calling the into the dylib + let scaffolding_contract_version = ffi_affine_mobile_native_uniffi_contract_version() + if bindings_contract_version != scaffolding_contract_version { + return InitializationResult.contractVersionMismatch + } + if (uniffi_affine_mobile_native_checksum_func_hashcash_mint() != 23633) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_checkpoint() != 36613) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_clear_clocks() != 25916) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_close() != 10808) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_connect() != 15551) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_delete_blob() != 9749) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_delete_doc() != 53248) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_get_blob() != 33049) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_get_doc_clock() != 29534) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_get_doc_clocks() != 44204) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_get_doc_snapshot() != 9624) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_get_doc_updates() != 57795) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_get_peer_pulled_remote_clock() != 32416) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_get_peer_pulled_remote_clocks() != 26840) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_get_peer_pushed_clocks() != 27263) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_get_peer_remote_clock() != 28366) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_get_peer_remote_clocks() != 57998) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_is_closed() != 25468) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_list_blobs() != 7040) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_mark_updates_merged() != 56840) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_push_update() != 44084) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_release_blobs() != 11311) { 
+ return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_set_blob() != 32778) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_set_doc_snapshot() != 6431) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_set_peer_pulled_remote_clock() != 38810) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_set_peer_pushed_clock() != 4820) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_set_peer_remote_clock() != 23312) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_set_space_id() != 22706) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_method_docstorage_validate() != 11413) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_affine_mobile_native_checksum_constructor_docstorage_new() != 48015) { return InitializationResult.apiChecksumMismatch } diff --git a/packages/frontend/apps/ios/App/App/uniffi/affine_mobile_nativeFFI.h b/packages/frontend/apps/ios/App/App/uniffi/affine_mobile_nativeFFI.h index 127e443db819d..932d263e1609d 100644 --- a/packages/frontend/apps/ios/App/App/uniffi/affine_mobile_nativeFFI.h +++ b/packages/frontend/apps/ios/App/App/uniffi/affine_mobile_nativeFFI.h @@ -250,6 +250,161 @@ typedef struct UniffiForeignFutureStructVoid { typedef void (*UniffiForeignFutureCompleteVoid)(uint64_t, UniffiForeignFutureStructVoid ); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_CLONE_DOCSTORAGE +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_CLONE_DOCSTORAGE +void*_Nonnull uniffi_affine_mobile_native_fn_clone_docstorage(void*_Nonnull ptr, RustCallStatus *_Nonnull out_status +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_FREE_DOCSTORAGE +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_FREE_DOCSTORAGE +void uniffi_affine_mobile_native_fn_free_docstorage(void*_Nonnull ptr, RustCallStatus *_Nonnull out_status +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_CONSTRUCTOR_DOCSTORAGE_NEW +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_CONSTRUCTOR_DOCSTORAGE_NEW +void*_Nonnull uniffi_affine_mobile_native_fn_constructor_docstorage_new(RustBuffer path, RustCallStatus *_Nonnull out_status +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_CHECKPOINT +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_CHECKPOINT +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_checkpoint(void*_Nonnull ptr +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_CLEAR_CLOCKS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_CLEAR_CLOCKS +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_clear_clocks(void*_Nonnull ptr +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_CLOSE +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_CLOSE +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_close(void*_Nonnull ptr +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_CONNECT +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_CONNECT +uint64_t 
uniffi_affine_mobile_native_fn_method_docstorage_connect(void*_Nonnull ptr +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_DELETE_BLOB +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_DELETE_BLOB +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_delete_blob(void*_Nonnull ptr, RustBuffer key, int8_t permanently +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_DELETE_DOC +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_DELETE_DOC +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_delete_doc(void*_Nonnull ptr, RustBuffer doc_id +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_BLOB +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_BLOB +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_get_blob(void*_Nonnull ptr, RustBuffer key +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_DOC_CLOCK +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_DOC_CLOCK +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_get_doc_clock(void*_Nonnull ptr, RustBuffer doc_id +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_DOC_CLOCKS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_DOC_CLOCKS +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_get_doc_clocks(void*_Nonnull ptr, RustBuffer after +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_DOC_SNAPSHOT +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_DOC_SNAPSHOT +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_get_doc_snapshot(void*_Nonnull ptr, RustBuffer doc_id +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_DOC_UPDATES +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_DOC_UPDATES +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_get_doc_updates(void*_Nonnull ptr, RustBuffer doc_id +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_PEER_PULLED_REMOTE_CLOCK +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_PEER_PULLED_REMOTE_CLOCK +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_get_peer_pulled_remote_clock(void*_Nonnull ptr, RustBuffer peer, RustBuffer doc_id +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_PEER_PULLED_REMOTE_CLOCKS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_PEER_PULLED_REMOTE_CLOCKS +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_get_peer_pulled_remote_clocks(void*_Nonnull ptr, RustBuffer peer +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_PEER_PUSHED_CLOCKS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_PEER_PUSHED_CLOCKS +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_get_peer_pushed_clocks(void*_Nonnull ptr, RustBuffer peer +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_PEER_REMOTE_CLOCK +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_PEER_REMOTE_CLOCK +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_get_peer_remote_clock(void*_Nonnull ptr, RustBuffer peer, RustBuffer doc_id +); +#endif +#ifndef 
UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_PEER_REMOTE_CLOCKS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_GET_PEER_REMOTE_CLOCKS +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_get_peer_remote_clocks(void*_Nonnull ptr, RustBuffer peer +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_IS_CLOSED +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_IS_CLOSED +int8_t uniffi_affine_mobile_native_fn_method_docstorage_is_closed(void*_Nonnull ptr, RustCallStatus *_Nonnull out_status +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_LIST_BLOBS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_LIST_BLOBS +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_list_blobs(void*_Nonnull ptr +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_MARK_UPDATES_MERGED +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_MARK_UPDATES_MERGED +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_mark_updates_merged(void*_Nonnull ptr, RustBuffer doc_id, RustBuffer updates +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_PUSH_UPDATE +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_PUSH_UPDATE +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_push_update(void*_Nonnull ptr, RustBuffer doc_id, RustBuffer update +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_RELEASE_BLOBS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_RELEASE_BLOBS +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_release_blobs(void*_Nonnull ptr +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_SET_BLOB +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_SET_BLOB +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_set_blob(void*_Nonnull ptr, RustBuffer blob +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_SET_DOC_SNAPSHOT +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_SET_DOC_SNAPSHOT +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_set_doc_snapshot(void*_Nonnull ptr, RustBuffer snapshot +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_SET_PEER_PULLED_REMOTE_CLOCK +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_SET_PEER_PULLED_REMOTE_CLOCK +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_set_peer_pulled_remote_clock(void*_Nonnull ptr, RustBuffer peer, RustBuffer doc_id, RustBuffer clock +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_SET_PEER_PUSHED_CLOCK +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_SET_PEER_PUSHED_CLOCK +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_set_peer_pushed_clock(void*_Nonnull ptr, RustBuffer peer, RustBuffer doc_id, RustBuffer clock +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_SET_PEER_REMOTE_CLOCK +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_SET_PEER_REMOTE_CLOCK +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_set_peer_remote_clock(void*_Nonnull ptr, RustBuffer peer, RustBuffer doc_id, RustBuffer clock +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_SET_SPACE_ID +#define 
UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_SET_SPACE_ID +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_set_space_id(void*_Nonnull ptr, RustBuffer space_id +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_VALIDATE +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_METHOD_DOCSTORAGE_VALIDATE +uint64_t uniffi_affine_mobile_native_fn_method_docstorage_validate(void*_Nonnull ptr +); #endif #ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_FUNC_HASHCASH_MINT #define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_FN_FUNC_HASHCASH_MINT @@ -540,6 +695,180 @@ void ffi_affine_mobile_native_rust_future_complete_void(uint64_t handle, RustCal #define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_FUNC_HASHCASH_MINT uint16_t uniffi_affine_mobile_native_checksum_func_hashcash_mint(void +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_CHECKPOINT +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_CHECKPOINT +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_checkpoint(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_CLEAR_CLOCKS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_CLEAR_CLOCKS +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_clear_clocks(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_CLOSE +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_CLOSE +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_close(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_CONNECT +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_CONNECT +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_connect(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_DELETE_BLOB +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_DELETE_BLOB +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_delete_blob(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_DELETE_DOC +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_DELETE_DOC +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_delete_doc(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_BLOB +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_BLOB +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_get_blob(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_DOC_CLOCK +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_DOC_CLOCK +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_get_doc_clock(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_DOC_CLOCKS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_DOC_CLOCKS +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_get_doc_clocks(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_DOC_SNAPSHOT +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_DOC_SNAPSHOT +uint16_t 
uniffi_affine_mobile_native_checksum_method_docstorage_get_doc_snapshot(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_DOC_UPDATES +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_DOC_UPDATES +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_get_doc_updates(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_PEER_PULLED_REMOTE_CLOCK +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_PEER_PULLED_REMOTE_CLOCK +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_get_peer_pulled_remote_clock(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_PEER_PULLED_REMOTE_CLOCKS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_PEER_PULLED_REMOTE_CLOCKS +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_get_peer_pulled_remote_clocks(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_PEER_PUSHED_CLOCKS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_PEER_PUSHED_CLOCKS +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_get_peer_pushed_clocks(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_PEER_REMOTE_CLOCK +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_PEER_REMOTE_CLOCK +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_get_peer_remote_clock(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_PEER_REMOTE_CLOCKS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_GET_PEER_REMOTE_CLOCKS +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_get_peer_remote_clocks(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_IS_CLOSED +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_IS_CLOSED +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_is_closed(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_LIST_BLOBS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_LIST_BLOBS +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_list_blobs(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_MARK_UPDATES_MERGED +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_MARK_UPDATES_MERGED +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_mark_updates_merged(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_PUSH_UPDATE +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_PUSH_UPDATE +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_push_update(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_RELEASE_BLOBS +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_RELEASE_BLOBS +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_release_blobs(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_SET_BLOB +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_SET_BLOB +uint16_t 
uniffi_affine_mobile_native_checksum_method_docstorage_set_blob(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_SET_DOC_SNAPSHOT +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_SET_DOC_SNAPSHOT +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_set_doc_snapshot(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_SET_PEER_PULLED_REMOTE_CLOCK +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_SET_PEER_PULLED_REMOTE_CLOCK +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_set_peer_pulled_remote_clock(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_SET_PEER_PUSHED_CLOCK +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_SET_PEER_PUSHED_CLOCK +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_set_peer_pushed_clock(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_SET_PEER_REMOTE_CLOCK +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_SET_PEER_REMOTE_CLOCK +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_set_peer_remote_clock(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_SET_SPACE_ID +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_SET_SPACE_ID +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_set_space_id(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_VALIDATE +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_METHOD_DOCSTORAGE_VALIDATE +uint16_t uniffi_affine_mobile_native_checksum_method_docstorage_validate(void + +); +#endif +#ifndef UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_CONSTRUCTOR_DOCSTORAGE_NEW +#define UNIFFI_FFIDEF_UNIFFI_AFFINE_MOBILE_NATIVE_CHECKSUM_CONSTRUCTOR_DOCSTORAGE_NEW +uint16_t uniffi_affine_mobile_native_checksum_constructor_docstorage_new(void + ); #endif #ifndef UNIFFI_FFIDEF_FFI_AFFINE_MOBILE_NATIVE_UNIFFI_CONTRACT_VERSION diff --git a/packages/frontend/apps/ios/App/xc-universal-binary.sh b/packages/frontend/apps/ios/App/xc-universal-binary.sh index 4c13990bd277a..b993008dc1045 100644 --- a/packages/frontend/apps/ios/App/xc-universal-binary.sh +++ b/packages/frontend/apps/ios/App/xc-universal-binary.sh @@ -71,4 +71,4 @@ for arch in $ARCHS; do esac done -$HOME/.cargo/bin/cargo run --bin uniffi-bindgen generate --library $SRCROOT/lib${FFI_TARGET}.a --language swift --out-dir $SRCROOT/../../ios/App/App/uniffi +$HOME/.cargo/bin/cargo run -p affine_mobile_native --bin uniffi-bindgen generate --library $SRCROOT/lib${FFI_TARGET}.a --language swift --out-dir $SRCROOT/../../ios/App/App/uniffi diff --git a/packages/frontend/apps/ios/package.json b/packages/frontend/apps/ios/package.json index b38796097f4cc..d7e0bb84f9875 100644 --- a/packages/frontend/apps/ios/package.json +++ b/packages/frontend/apps/ios/package.json @@ -26,9 +26,11 @@ "next-themes": "^0.4.4", "react": "^19.0.0", "react-dom": "^19.0.0", - "react-router-dom": "^6.28.0" + "react-router-dom": "^6.28.0", + "yjs": "13.6.18" }, "devDependencies": { + "@affine/native": "workspace:*", "@capacitor/cli": "^6.2.0", "@types/react": "^19.0.1", "@types/react-dom": "^19.0.2", diff --git a/packages/frontend/apps/ios/src/plugins/nbstore/blob.ts b/packages/frontend/apps/ios/src/plugins/nbstore/blob.ts new file mode 
100644
index 0000000000000..c1e0641db9bf0
--- /dev/null
+++ b/packages/frontend/apps/ios/src/plugins/nbstore/blob.ts
@@ -0,0 +1,33 @@
+import { type BlobRecord, BlobStorageBase, share } from '@affine/nbstore';
+
+import { NativeDBConnection } from './db';
+
+export class SqliteBlobStorage extends BlobStorageBase {
+  override connection = share(
+    new NativeDBConnection(this.peer, this.spaceType, this.spaceId)
+  );
+
+  get db() {
+    return this.connection.inner;
+  }
+
+  override async get(key: string) {
+    return this.db.getBlob(key);
+  }
+
+  override async set(blob: BlobRecord) {
+    await this.db.setBlob(blob);
+  }
+
+  override async delete(key: string, permanently: boolean) {
+    await this.db.deleteBlob(key, permanently);
+  }
+
+  override async release() {
+    await this.db.releaseBlobs();
+  }
+
+  override async list() {
+    return this.db.listBlobs();
+  }
+}
diff --git a/packages/frontend/apps/ios/src/plugins/nbstore/db.ts b/packages/frontend/apps/ios/src/plugins/nbstore/db.ts
new file mode 100644
index 0000000000000..194e139f7c928
--- /dev/null
+++ b/packages/frontend/apps/ios/src/plugins/nbstore/db.ts
@@ -0,0 +1,46 @@
+import { DocStorage as NativeDocStorage } from '@affine/native';
+import { AutoReconnectConnection, type SpaceType } from '@affine/nbstore';
+
+import { NbStoreDocStorage } from './plugin';
+
+export class NativeDBConnection extends AutoReconnectConnection<NativeDocStorage> {
+  constructor(
+    private readonly peer: string,
+    private readonly type: SpaceType,
+    private readonly id: string
+  ) {
+    super();
+  }
+
+  async getDBPath() {
+    const { path } = await NbStoreDocStorage.getSpaceDBPath({
+      peer: this.peer,
+      spaceType: this.type,
+      id: this.id,
+    });
+    return path;
+  }
+
+  override get shareId(): string {
+    return `sqlite:${this.peer}:${this.type}:${this.id}`;
+  }
+
+  override async doConnect() {
+    const dbPath = await this.getDBPath();
+    const conn = new NativeDocStorage(dbPath);
+    await conn.connect();
+    console.info('[nbstore] connection established', this.shareId);
+    return conn;
+  }
+
+  override doDisconnect(conn: NativeDocStorage) {
+    conn
+      .close()
+      .then(() => {
+        console.info('[nbstore] connection closed', this.shareId);
+      })
+      .catch(err => {
+        console.error('[nbstore] connection close failed', this.shareId, err);
+      });
+  }
+}
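The `share(new NativeDBConnection(...))` pattern used by the storage classes above is what lets the doc, blob, and sync storages of one space reuse a single native SQLite connection. A minimal sketch of the intended behavior, assuming `share` from '@affine/nbstore' dedupes connections by their `shareId` (the dedupe semantics and the literal peer/space values below are assumptions, not something this patch spells out):

import { share } from '@affine/nbstore';
import { NativeDBConnection } from './db';

// Two wrappers built for the same (peer, spaceType, spaceId) triple...
const a = share(new NativeDBConnection('local', 'workspace', 'space-1'));
const b = share(new NativeDBConnection('local', 'workspace', 'space-1'));

// ...should end up backed by one underlying NativeDocStorage, because both
// report the same shareId: 'sqlite:local:workspace:space-1'.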
diff --git a/packages/frontend/apps/ios/src/plugins/nbstore/definitions.ts b/packages/frontend/apps/ios/src/plugins/nbstore/definitions.ts
new file mode 100644
index 0000000000000..c9b2b9a673d38
--- /dev/null
+++ b/packages/frontend/apps/ios/src/plugins/nbstore/definitions.ts
@@ -0,0 +1,124 @@
+export interface Blob {
+  key: string;
+  // base64 encoded data
+  data: string;
+  mime: string;
+  size: number;
+  createdAt: number;
+}
+
+export interface SetBlob {
+  key: string;
+  // base64 encoded data
+  data: string;
+  mime: string;
+}
+
+export interface ListedBlob {
+  key: string;
+  mime: string;
+  size: number;
+  createdAt: number;
+}
+
+export interface DocClock {
+  docId: string;
+  timestamp: number;
+}
+
+export interface NbStorePlugin {
+  getSpaceDBPath: (options: {
+    peer: string;
+    spaceType: string;
+    id: string;
+  }) => Promise<{ path: string }>;
+  create: (options: { path: string }) => Promise<void>;
+  connect: () => Promise<void>;
+  close: () => Promise<void>;
+  isClosed: () => Promise<{ isClosed: boolean }>;
+  checkpoint: () => Promise<void>;
+  validate: () => Promise<{ isValidate: boolean }>;
+
+  setSpaceId: (options: { spaceId: string }) => Promise<void>;
+  pushUpdate: (options: {
+    docId: string;
+    data: string;
+  }) => Promise<{ timestamp: number }>;
+  getDocSnapshot: (options: { docId: string }) => Promise<
+    | {
+        docId: string;
+        // base64 encoded data
+        data: string;
+        timestamp: number;
+      }
+    | undefined
+  >;
+  setDocSnapshot: (options: {
+    docId: string;
+    data: string;
+  }) => Promise<{ success: boolean }>;
+  getDocUpdates: (options: { docId: string }) => Promise<
+    {
+      docId: string;
+      createdAt: number;
+      // base64 encoded data
+      data: string;
+    }[]
+  >;
+  markUpdatesMerged: (options: {
+    docId: string;
+    timestamps: number[];
+  }) => Promise<{ count: number }>;
+  deleteDoc: (options: { docId: string }) => Promise<void>;
+  getDocClocks: (options: { after: number }) => Promise<
+    {
+      docId: string;
+      timestamp: number;
+    }[]
+  >;
+  getDocClock: (options: { docId: string }) => Promise<
+    | {
+        docId: string;
+        timestamp: number;
+      }
+    | undefined
+  >;
+  getBlob: (options: { key: string }) => Promise<Blob | null>;
+  setBlob: (options: SetBlob) => Promise<void>;
+  deleteBlob: (options: { key: string; permanently: boolean }) => Promise<void>;
+  releaseBlobs: () => Promise<void>;
+  listBlobs: () => Promise<Array<ListedBlob>>;
+  getPeerRemoteClocks: (options: { peer: string }) => Promise<Array<DocClock>>;
+  getPeerRemoteClock: (options: {
+    peer: string;
+    docId: string;
+  }) => Promise<DocClock>;
+  setPeerRemoteClock: (options: {
+    peer: string;
+    docId: string;
+    clock: number;
+  }) => Promise<void>;
+  getPeerPushedClocks: (options: { peer: string }) => Promise<Array<DocClock>>;
+  getPeerPushedClock: (options: {
+    peer: string;
+    docId: string;
+  }) => Promise<DocClock>;
+  setPeerPushedClock: (options: {
+    peer: string;
+    docId: string;
+    clock: number;
+  }) => Promise<void>;
+  getPeerPulledRemoteClocks: (options: {
+    peer: string;
+  }) => Promise<Array<DocClock>>;
+  getPeerPulledRemoteClock: (options: {
+    peer: string;
+    docId: string;
+  }) => Promise<DocClock>;
+  setPeerPulledRemoteClock: (options: {
+    peer: string;
+    docId: string;
+    clock: number;
+  }) => Promise<void>;
+  clearClocks: () => Promise<void>;
+}
diff --git a/packages/frontend/apps/ios/src/plugins/nbstore/doc.ts b/packages/frontend/apps/ios/src/plugins/nbstore/doc.ts
new file mode 100644
index 0000000000000..4078f50513d1d
--- /dev/null
+++ b/packages/frontend/apps/ios/src/plugins/nbstore/doc.ts
@@ -0,0 +1,83 @@
+import {
+  type DocClocks,
+  type DocRecord,
+  DocStorageBase,
+  type DocUpdate,
+  share,
+} from '@affine/nbstore';
+
+import { NativeDBConnection } from './db';
+
+export class SqliteDocStorage extends DocStorageBase {
+  override connection = share(
+    new NativeDBConnection(this.peer, this.spaceType, this.spaceId)
+  );
+
+  get db() {
+    return this.connection.inner;
+  }
+
+  override async pushDocUpdate(update: DocUpdate) {
+    const timestamp = await this.db.pushUpdate(update.docId, update.bin);
+
+    return { docId: update.docId, timestamp };
+  }
+
+  override async deleteDoc(docId: string) {
+    await this.db.deleteDoc(docId);
+  }
+
+  override async getDocTimestamps(after?: Date) {
+    const clocks = await this.db.getDocClocks(after);
+
+    return clocks.reduce((ret, cur) => {
+      ret[cur.docId] = cur.timestamp;
+      return ret;
+    }, {} as DocClocks);
+  }
+
+  override async getDocTimestamp(docId: string) {
+    return this.db.getDocClock(docId);
+  }
+
+  protected override async getDocSnapshot(docId: string) {
+    const snapshot = await this.db.getDocSnapshot(docId);
+
+    if (!snapshot) {
+      return null;
+    }
+
+    return {
+      docId,
+      bin: snapshot.data,
+      timestamp: snapshot.timestamp,
+    };
+  }
+
+  protected override async setDocSnapshot(
+    snapshot: DocRecord
+  ): Promise<boolean> {
+    return this.db.setDocSnapshot({
+      docId: snapshot.docId,
+      data: Buffer.from(snapshot.bin),
+      timestamp: new Date(snapshot.timestamp),
+    });
+  }
+
+  protected override async getDocUpdates(docId: string) {
+    return this.db.getDocUpdates(docId).then(updates =>
+      updates.map(update => ({
+        docId,
+        bin: update.data,
+        timestamp: update.createdAt,
+      }))
+    );
+  }
+
+  protected override markUpdatesMerged(docId: string, updates: DocRecord[]) {
+    return this.db.markUpdatesMerged(
+      docId,
+      updates.map(update => update.timestamp)
+    );
+  }
+}
diff --git a/packages/frontend/apps/ios/src/plugins/nbstore/handlers.ts b/packages/frontend/apps/ios/src/plugins/nbstore/handlers.ts
new file mode 100644
index 0000000000000..946cb79cf5041
--- /dev/null
+++ b/packages/frontend/apps/ios/src/plugins/nbstore/handlers.ts
@@ -0,0 +1,128 @@
+import {
+  type BlobRecord,
+  type DocClock,
+  type DocUpdate,
+} from '@affine/nbstore';
+
+import { ensureStorage, getStorage } from './storage';
+
+export const nbstoreHandlers = {
+  connect: async (id: string) => {
+    await ensureStorage(id);
+  },
+
+  close: async (id: string) => {
+    const store = getStorage(id);
+
+    if (store) {
+      store.disconnect();
+      // The store may be shared with other tabs, so we don't delete it from cache
+      // the underlying connection will handle the close correctly
+      // STORE_CACHE.delete(`${spaceType}:${spaceId}`);
+    }
+  },
+
+  pushDocUpdate: async (id: string, update: DocUpdate) => {
+    const store = await ensureStorage(id);
+    return store.get('doc').pushDocUpdate(update);
+  },
+
+  getDoc: async (id: string, docId: string) => {
+    const store = await ensureStorage(id);
+    return store.get('doc').getDoc(docId);
+  },
+
+  deleteDoc: async (id: string, docId: string) => {
+    const store = await ensureStorage(id);
+    return store.get('doc').deleteDoc(docId);
+  },
+
+  getDocTimestamps: async (id: string, after?: Date) => {
+    const store = await ensureStorage(id);
+    return store.get('doc').getDocTimestamps(after);
+  },
+
+  getDocTimestamp: async (id: string, docId: string) => {
+    const store = await ensureStorage(id);
+    return store.get('doc').getDocTimestamp(docId);
+  },
+
+  setBlob: async (id: string, blob: BlobRecord) => {
+    const store = await ensureStorage(id);
+    return store.get('blob').set(blob);
+  },
+
+  getBlob: async (id: string, key: string) => {
+    const store = await ensureStorage(id);
+    return store.get('blob').get(key);
+  },
+
+  deleteBlob: async (id: string, key: string, permanently: boolean) => {
+    const store = await ensureStorage(id);
+    return store.get('blob').delete(key, permanently);
+  },
+
+  listBlobs: async (id: string) => {
+    const store = await ensureStorage(id);
+    return store.get('blob').list();
+  },
+
+  releaseBlobs: async (id: string) => {
+    const store = await ensureStorage(id);
+    return store.get('blob').release();
+  },
+
+  getPeerRemoteClocks: async (id: string, peer: string) => {
+    const store = await ensureStorage(id);
+    return store.get('sync').getPeerRemoteClocks(peer);
+  },
+
+  getPeerRemoteClock: async (id: string, peer: string, docId: string) => {
+    const store = await ensureStorage(id);
+    return store.get('sync').getPeerRemoteClock(peer, docId);
+  },
+
+  setPeerRemoteClock: async (id: string, peer: string, clock: DocClock) => {
+    const store = await ensureStorage(id);
+    return store.get('sync').setPeerRemoteClock(peer, clock);
+  },
+
+  getPeerPulledRemoteClocks: async (id: string, peer: string) => {
+    const store = await ensureStorage(id);
+    return store.get('sync').getPeerPulledRemoteClocks(peer);
+  },
+
+  getPeerPulledRemoteClock: async (id: string, peer: string, docId: string) => {
+    const store = await ensureStorage(id);
+    return store.get('sync').getPeerPulledRemoteClock(peer, docId);
+  },
+
+  setPeerPulledRemoteClock: async (
+    id: string,
+    peer: string,
+    clock: DocClock
+  ) => {
+    const store = await ensureStorage(id);
+    return store.get('sync').setPeerPulledRemoteClock(peer, clock);
+  },
+
+  getPeerPushedClocks: async (id: string, peer: string) => {
+    const store = await ensureStorage(id);
+    return store.get('sync').getPeerPushedClocks(peer);
+  },
+
+  getPeerPushedClock: async (id: string, peer: string, docId: string) => {
+    const store = await ensureStorage(id);
+    return store.get('sync').getPeerPushedClock(peer, docId);
+  },
+
+  setPeerPushedClock: async (id: string, peer: string, clock: DocClock) => {
+    const store = await ensureStorage(id);
+    return store.get('sync').setPeerPushedClock(peer, clock);
+  },
+
+  clearClocks: async (id: string) => {
+    const store = await ensureStorage(id);
+    return store.get('sync').clearClocks();
+  },
+};
diff --git a/packages/frontend/apps/ios/src/plugins/nbstore/index.ts b/packages/frontend/apps/ios/src/plugins/nbstore/index.ts
new file mode 100644
index 0000000000000..6d17cae7d3acc
--- /dev/null
+++ b/packages/frontend/apps/ios/src/plugins/nbstore/index.ts
@@ -0,0 +1,5 @@
+export * from './definitions';
+export { nbstoreHandlers } from './handlers';
+export { NbStoreDocStorage } from './plugin';
+export * from './storage';
+export { universalId } from '@affine/nbstore';
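The handlers above are keyed by a universal id and lazily open the space storage on first use. A rough sketch of how a caller might drive them; `universalId` is re-exported from '@affine/nbstore' in index.ts, but its exact signature and the DocUpdate shape are assumptions based on how doc.ts consumes updates:

import { universalId } from '@affine/nbstore';
import { nbstoreHandlers } from './handlers';

async function syncOneDoc(update: Uint8Array) {
  // Hypothetical space coordinates; real values come from the workspace layer.
  const id = universalId({ peer: 'local', type: 'workspace', id: 'space-1' });

  await nbstoreHandlers.connect(id);
  await nbstoreHandlers.pushDocUpdate(id, { docId: 'doc-1', bin: update });
  const clocks = await nbstoreHandlers.getDocTimestamps(id);
  console.log('docs with clocks:', Object.keys(clocks));
  await nbstoreHandlers.close(id);
}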
diff --git a/packages/frontend/apps/ios/src/plugins/nbstore/plugin.ts b/packages/frontend/apps/ios/src/plugins/nbstore/plugin.ts
new file mode 100644
index 0000000000000..b3d7cd4a3eb49
--- /dev/null
+++ b/packages/frontend/apps/ios/src/plugins/nbstore/plugin.ts
@@ -0,0 +1,247 @@
+import {
+  base64ToUint8Array,
+  uint8ArrayToBase64,
+} from '@affine/core/modules/workspace-engine';
+import {
+  type Blob,
+  type DocClock,
+  type DocRecord,
+  type DocStorage,
+  type DocUpdate,
+  type ListedBlob,
+} from '@affine/native';
+import { registerPlugin } from '@capacitor/core';
+
+import type { NbStorePlugin } from './definitions';
+
+export const NbStoreDocStorage =
+  registerPlugin<NbStorePlugin>('NbStoreDocStorage');
+
+export interface SetBlob {
+  key: string;
+  data: Uint8Array;
+  mime: string;
+}
+
+export class NativeDocStorage implements DocStorage {
+  /** Initialize the database and run migrations. */
+  connect(): Promise<void> {
+    return NbStoreDocStorage.connect();
+  }
+
+  close(): Promise<void> {
+    return NbStoreDocStorage.close();
+  }
+
+  get isClosed(): Promise<boolean> {
+    return NbStoreDocStorage.isClosed().then(result => result.isClosed);
+  }
+  /**
+   * Flush the WAL file to the database file.
+   * See https://www.sqlite.org/pragma.html#pragma_wal_checkpoint:~:text=PRAGMA%20schema.wal_checkpoint%3B
+   */
+  checkpoint(): Promise<void> {
+    return NbStoreDocStorage.checkpoint();
+  }
+
+  validate(): Promise<boolean> {
+    return NbStoreDocStorage.validate().then(result => result.isValidate);
+  }
+
+  setSpaceId(spaceId: string): Promise<void> {
+    return NbStoreDocStorage.setSpaceId({ spaceId });
+  }
+
+  async pushUpdate(docId: string, update: Uint8Array): Promise<Date> {
+    return NbStoreDocStorage.pushUpdate({
+      docId,
+      data: await uint8ArrayToBase64(update),
+    }).then(result => new Date(result.timestamp));
+  }
+
+  getDocSnapshot(docId: string): Promise<DocRecord | null> {
+    return NbStoreDocStorage.getDocSnapshot({ docId }).then(result => {
+      if (result) {
+        return {
+          ...result,
+          data: base64ToUint8Array(result.data),
+          timestamp: new Date(result.timestamp),
+        };
+      }
+      return null;
+    });
+  }
+
+  async setDocSnapshot(snapshot: DocRecord): Promise<boolean> {
+    return NbStoreDocStorage.setDocSnapshot({
+      docId: snapshot.docId,
+      data: await uint8ArrayToBase64(snapshot.data),
+    }).then(result => result.success);
+  }
+
+  getDocUpdates(docId: string): Promise<Array<DocUpdate>> {
+    return NbStoreDocStorage.getDocUpdates({ docId }).then(result =>
+      result.map(update => ({
+        ...update,
+        data: base64ToUint8Array(update.data),
+        createdAt: new Date(update.createdAt),
+      }))
+    );
+  }
+
+  markUpdatesMerged(docId: string, updates: Array<Date>): Promise<number> {
+    return NbStoreDocStorage.markUpdatesMerged({
+      docId,
+      timestamps: updates.map(date => date.getTime()),
+    }).then(result => result.count);
+  }
+
+  deleteDoc(docId: string): Promise<void> {
+    return NbStoreDocStorage.deleteDoc({ docId });
+  }
+
+  getDocClocks(after: Date): Promise<Array<DocClock>> {
+    return NbStoreDocStorage.getDocClocks({
+      after: after.getTime(),
+    }).then(result =>
+      result.map(clock => ({
+        ...clock,
+        timestamp: new Date(clock.timestamp),
+      }))
+    );
+  }
+
+  getDocClock(docId: string): Promise<DocClock | null> {
+    return NbStoreDocStorage.getDocClock({ docId }).then(result => {
+      if (result) {
+        return {
+          ...result,
+          timestamp: new Date(result.timestamp),
+        };
+      }
+      return null;
+    });
+  }
+
+  getBlob(key: string): Promise<Blob | null> {
+    return NbStoreDocStorage.getBlob({ key }).then(result => {
+      if (result) {
+        return {
+          ...result,
+          data: base64ToUint8Array(result.data),
+          createdAt: new Date(result.createdAt),
+        };
+      }
+      return null;
+    });
+  }
+
+  async setBlob(blob: SetBlob): Promise<void> {
+    return NbStoreDocStorage.setBlob({
+      key: blob.key,
+      data: await uint8ArrayToBase64(blob.data),
+      mime: blob.mime,
+    });
+  }
+
+  deleteBlob(key: string, permanently: boolean): Promise<void> {
+    return NbStoreDocStorage.deleteBlob({ key, permanently });
+  }
+
+  releaseBlobs(): Promise<void> {
+    return NbStoreDocStorage.releaseBlobs();
+  }
+
+  async listBlobs(): Promise<Array<ListedBlob>> {
+    return (await NbStoreDocStorage.listBlobs()).map(blob => ({
+      ...blob,
+      createdAt: new Date(blob.createdAt),
+    }));
+  }
+
+  getPeerRemoteClocks(peer: string): Promise<Array<DocClock>> {
+    return NbStoreDocStorage.getPeerRemoteClocks({ peer }).then(result =>
+      result.map(clock => ({
+        ...clock,
+        timestamp: new Date(clock.timestamp),
+      }))
+    );
+  }
+
+  getPeerRemoteClock(peer: string, docId: string): Promise<DocClock> {
+    return NbStoreDocStorage.getPeerRemoteClock({ peer, docId }).then(
+      result => ({
+        ...result,
+        timestamp: new Date(result.timestamp),
+      })
+    );
+  }
+
+  setPeerRemoteClock(peer: string, docId: string, clock: Date): Promise<void> {
+    return NbStoreDocStorage.setPeerRemoteClock({
+      peer,
+      docId,
+      clock: clock.getTime(),
+    });
+  }
+
+  getPeerPulledRemoteClocks(peer: string): Promise<Array<DocClock>> {
+    return NbStoreDocStorage.getPeerPulledRemoteClocks({ peer }).then(result =>
+      result.map(clock => ({
+        ...clock,
+        timestamp: new Date(clock.timestamp),
+      }))
+    );
+  }
+
+  getPeerPulledRemoteClock(peer: string, docId: string): Promise<DocClock> {
+    return NbStoreDocStorage.getPeerPulledRemoteClock({ peer, docId }).then(
+      result => ({
+        ...result,
+        timestamp: new Date(result.timestamp),
+      })
+    );
+  }
+
+  setPeerPulledRemoteClock(
+    peer: string,
+    docId: string,
+    clock: Date
+  ): Promise<void> {
+    return NbStoreDocStorage.setPeerPulledRemoteClock({
+      peer,
+      docId,
+      clock: clock.getTime(),
+    });
+  }
+
+  getPeerPushedClocks(peer: string): Promise<Array<DocClock>> {
+    return NbStoreDocStorage.getPeerPushedClocks({ peer }).then(result =>
+      result.map(clock => ({
+        ...clock,
+        timestamp: new Date(clock.timestamp),
+      }))
+    );
+  }
+
+  getPeerPushedClock(peer: string, docId: string): Promise<DocClock> {
+    return NbStoreDocStorage.getPeerPushedClock({ peer, docId }).then(
+      result => ({
+        ...result,
+        timestamp: new Date(result.timestamp),
+      })
+    );
+  }
+
+  setPeerPushedClock(peer: string, docId: string, clock: Date): Promise<void> {
+    return NbStoreDocStorage.setPeerPushedClock({
+      peer,
+      docId,
+      clock: clock.getTime(),
+    });
+  }
+
+  clearClocks(): Promise<void> {
+    return NbStoreDocStorage.clearClocks();
+  }
+}
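NativeDocStorage above normalizes the Capacitor bridge types: binary payloads cross as base64 strings and epoch-millisecond numbers come back as Date objects. A rough usage sketch; it assumes the native side has already been pointed at a database file (the getSpaceDBPath/create step is omitted), and the doc id and bytes are placeholders:

import { NativeDocStorage } from './plugin';

async function roundTrip() {
  const storage = new NativeDocStorage();
  await storage.connect();

  // Uint8Array in, Date back out.
  const timestamp = await storage.pushUpdate('doc-1', new Uint8Array([1, 2, 3]));
  console.log('stored at', timestamp.toISOString());

  // Snapshots are decoded from base64 back into Uint8Array when present.
  const snapshot = await storage.getDocSnapshot('doc-1');
  console.log(snapshot ? snapshot.data instanceof Uint8Array : 'no snapshot yet');

  await storage.close();
}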
diff --git a/packages/frontend/apps/ios/src/plugins/nbstore/storage.ts b/packages/frontend/apps/ios/src/plugins/nbstore/storage.ts
new file mode 100644
index 0000000000000..5685743160f61
--- /dev/null
+++ b/packages/frontend/apps/ios/src/plugins/nbstore/storage.ts
@@ -0,0 +1,83 @@
+import { parseUniversalId, SpaceStorage } from '@affine/nbstore';
+import { applyUpdate, Doc as YDoc } from 'yjs';
+
+import { SqliteBlobStorage } from './blob';
+import { NativeDBConnection } from './db';
+import { SqliteDocStorage } from './doc';
+import { SqliteSyncStorage } from './sync';
+
+export class SqliteSpaceStorage extends SpaceStorage {
+  get connection() {
+    const docStore = this.get('doc');
+
+    if (!docStore) {
+      throw new Error('doc store not found');
+    }
+
+    const connection = docStore.connection;
+
+    if (!(connection instanceof NativeDBConnection)) {
+      throw new Error('doc store connection is not a Sqlite connection');
+    }
+
+    return connection;
+  }
+
+  async getDBPath() {
+    return this.connection.getDBPath();
+  }
+
+  async getWorkspaceName() {
+    const docStore = this.tryGet('doc');
+
+    if (!docStore) {
+      return null;
+    }
+
+    const doc = await docStore.getDoc(docStore.spaceId);
+    if (!doc) {
+      return null;
+    }
+
+    const ydoc = new YDoc();
+    applyUpdate(ydoc, doc.bin);
+    return ydoc.getMap('meta').get('name') as string;
+  }
+
+  async checkpoint() {
+    await this.connection.inner.checkpoint();
+  }
+}
+
+const STORE_CACHE = new Map<string, SqliteSpaceStorage>();
+
+export function getStorage(universalId: string) {
+  return STORE_CACHE.get(universalId);
+}
+
+export async function ensureStorage(universalId: string) {
+  const { peer, type, id } = parseUniversalId(universalId);
+  let store = STORE_CACHE.get(universalId);
+
+  if (!store) {
+    const opts = {
+      peer,
+      type,
+      id,
+    };
+
+    store = new SqliteSpaceStorage([
+      new SqliteDocStorage(opts),
+      new SqliteBlobStorage(opts),
+      new SqliteSyncStorage(opts),
+    ]);
+
+    store.connect();
+
+    await store.waitForConnected();
+
+    STORE_CACHE.set(universalId, store);
+  }
+
+  return store;
+}
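ensureStorage above builds the doc, blob, and sync storages once per universal id, connects them, and afterwards serves the cached instance. A sketch of the intended flow; the universal id construction mirrors parseUniversalId, but its exact signature is an assumption:

import { universalId } from '@affine/nbstore';
import { ensureStorage, getStorage } from './storage';

async function openAndInspect() {
  const id = universalId({ peer: 'local', type: 'workspace', id: 'space-1' });

  const store = await ensureStorage(id); // connects on first use
  console.log(await store.getWorkspaceName()); // reads the yjs meta map
  await store.checkpoint(); // flush the WAL through the shared connection

  console.log(getStorage(id) === store); // served from STORE_CACHE afterwards
}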
b/packages/frontend/apps/ios/src/plugins/nbstore/sync.ts @@ -0,0 +1,70 @@ +import { + BasicSyncStorage, + type DocClock, + type DocClocks, + share, +} from '@affine/nbstore'; + +import { NativeDBConnection } from './db'; + +export class SqliteSyncStorage extends BasicSyncStorage { + override connection = share( + new NativeDBConnection(this.peer, this.spaceType, this.spaceId) + ); + + get db() { + return this.connection.inner; + } + + override async getPeerRemoteClocks(peer: string) { + const records = await this.db.getPeerRemoteClocks(peer); + return records.reduce((clocks, { docId, timestamp }) => { + clocks[docId] = timestamp; + return clocks; + }, {} as DocClocks); + } + + override async getPeerRemoteClock(peer: string, docId: string) { + return this.db.getPeerRemoteClock(peer, docId); + } + + override async setPeerRemoteClock(peer: string, clock: DocClock) { + await this.db.setPeerRemoteClock(peer, clock.docId, clock.timestamp); + } + + override async getPeerPulledRemoteClock(peer: string, docId: string) { + return this.db.getPeerPulledRemoteClock(peer, docId); + } + + override async getPeerPulledRemoteClocks(peer: string) { + const records = await this.db.getPeerPulledRemoteClocks(peer); + return records.reduce((clocks, { docId, timestamp }) => { + clocks[docId] = timestamp; + return clocks; + }, {} as DocClocks); + } + + override async setPeerPulledRemoteClock(peer: string, clock: DocClock) { + await this.db.setPeerPulledRemoteClock(peer, clock.docId, clock.timestamp); + } + + override async getPeerPushedClocks(peer: string) { + const records = await this.db.getPeerPushedClocks(peer); + return records.reduce((clocks, { docId, timestamp }) => { + clocks[docId] = timestamp; + return clocks; + }, {} as DocClocks); + } + + override async getPeerPushedClock(peer: string, docId: string) { + return this.db.getPeerPushedClock(peer, docId); + } + + override async setPeerPushedClock(peer: string, clock: DocClock) { + await this.db.setPeerPushedClock(peer, clock.docId, clock.timestamp); + } + + override async clearClocks() { + await this.db.clearClocks(); + } +} diff --git a/packages/frontend/core/src/modules/workspace-engine/index.ts b/packages/frontend/core/src/modules/workspace-engine/index.ts index bde7581b45c44..aa634f354413e 100644 --- a/packages/frontend/core/src/modules/workspace-engine/index.ts +++ b/packages/frontend/core/src/modules/workspace-engine/index.ts @@ -16,6 +16,7 @@ import { import { WorkspaceEngineStorageProvider } from './providers/engine'; export { CloudBlobStorage } from './impls/engine/blob-cloud'; +export { base64ToUint8Array, uint8ArrayToBase64 } from './utils/base64'; export function configureBrowserWorkspaceFlavours(framework: Framework) { framework diff --git a/packages/frontend/mobile-native/Cargo.toml b/packages/frontend/mobile-native/Cargo.toml index 531e966aad6b7..2072e142a015a 100644 --- a/packages/frontend/mobile-native/Cargo.toml +++ b/packages/frontend/mobile-native/Cargo.toml @@ -12,8 +12,14 @@ name = "uniffi-bindgen" path = "uniffi-bindgen.rs" [dependencies] -affine_common = { workspace = true } -uniffi = { version = "0.28", features = ["cli"] } +anyhow = { workspace = true } +affine_common = { workspace = true } +affine_nbstore = { workspace = true, features = ["noop"] } +base64-simd = { workspace = true } +chrono = { workspace = true } +uniffi = { workspace = true, features = ["cli"] } +thiserror = { workspace = true } +sqlx = { workspace = true } [build-dependencies] -uniffi = { version = "0.28", features = ["build"] } +uniffi = { workspace = true, 
features = ["build"] } diff --git a/packages/frontend/mobile-native/src/error.rs b/packages/frontend/mobile-native/src/error.rs new file mode 100644 index 0000000000000..fef05e26e8108 --- /dev/null +++ b/packages/frontend/mobile-native/src/error.rs @@ -0,0 +1,19 @@ +use thiserror::Error; + +#[derive(uniffi::Error, Error, Debug)] +pub enum UniffiError { + #[error("Empty doc storage path")] + EmptyDocStoragePath, + #[error("Empty space id")] + EmptySpaceId, + #[error("Sqlx error: {0}")] + SqlxError(String), + #[error("Base64 decoding error: {0}")] + Base64DecodingError(String), +} + +impl From for UniffiError { + fn from(err: sqlx::Error) -> Self { + UniffiError::SqlxError(err.to_string()) + } +} diff --git a/packages/frontend/mobile-native/src/lib.rs b/packages/frontend/mobile-native/src/lib.rs index 595c15cc3849b..4f329e2886b9d 100644 --- a/packages/frontend/mobile-native/src/lib.rs +++ b/packages/frontend/mobile-native/src/lib.rs @@ -1,4 +1,11 @@ +use std::time::SystemTime; + use affine_common::hashcash::Stamp; +use affine_nbstore::storage; + +use crate::error::UniffiError; + +mod error; uniffi::setup_scaffolding!("affine_mobile_native"); @@ -6,3 +13,431 @@ uniffi::setup_scaffolding!("affine_mobile_native"); pub fn hashcash_mint(resource: String, bits: u32) -> String { Stamp::mint(resource, Some(bits)).format() } + +#[derive(uniffi::Record)] +pub struct DocRecord { + pub doc_id: String, + // base64 encoded data + pub data: String, + pub timestamp: SystemTime, +} + +impl From for DocRecord { + fn from(record: affine_nbstore::DocRecord) -> Self { + Self { + doc_id: record.doc_id, + data: base64_simd::STANDARD.encode_to_string(&record.data), + timestamp: record.timestamp.and_utc().into(), + } + } +} + +impl TryFrom for affine_nbstore::DocRecord { + type Error = UniffiError; + + fn try_from(record: DocRecord) -> Result { + Ok(Self { + doc_id: record.doc_id, + data: base64_simd::STANDARD + .decode_to_vec(record.data) + .map_err(|e| UniffiError::Base64DecodingError(e.to_string()))?, + timestamp: chrono::DateTime::::from(record.timestamp).naive_utc(), + }) + } +} + +#[derive(uniffi::Record)] +pub struct DocUpdate { + pub doc_id: String, + pub created_at: SystemTime, + // base64 encoded data + pub data: String, +} + +impl From for DocUpdate { + fn from(update: affine_nbstore::DocUpdate) -> Self { + Self { + doc_id: update.doc_id, + created_at: update.created_at.and_utc().into(), + data: base64_simd::STANDARD.encode_to_string(&update.data), + } + } +} + +impl From for affine_nbstore::DocUpdate { + fn from(update: DocUpdate) -> Self { + Self { + doc_id: update.doc_id, + created_at: chrono::DateTime::::from(update.created_at).naive_utc(), + data: update.data.into(), + } + } +} + +#[derive(uniffi::Record)] +pub struct DocClock { + pub doc_id: String, + pub timestamp: SystemTime, +} + +impl From for DocClock { + fn from(clock: affine_nbstore::DocClock) -> Self { + Self { + doc_id: clock.doc_id, + timestamp: clock.timestamp.and_utc().into(), + } + } +} + +impl From for affine_nbstore::DocClock { + fn from(clock: DocClock) -> Self { + Self { + doc_id: clock.doc_id, + timestamp: chrono::DateTime::::from(clock.timestamp).naive_utc(), + } + } +} + +#[derive(uniffi::Record)] +pub struct Blob { + pub key: String, + // base64 encoded data + pub data: String, + pub mime: String, + pub size: i64, + pub created_at: SystemTime, +} + +impl From for Blob { + fn from(blob: affine_nbstore::Blob) -> Self { + Self { + key: blob.key, + data: base64_simd::STANDARD.encode_to_string(&blob.data), + mime: blob.mime, + size: 
blob.size, + created_at: blob.created_at.and_utc().into(), + } + } +} + +#[derive(uniffi::Record)] +pub struct SetBlob { + pub key: String, + // base64 encoded data + pub data: String, + pub mime: String, +} + +impl TryFrom for affine_nbstore::SetBlob { + type Error = UniffiError; + + fn try_from(blob: SetBlob) -> Result { + Ok(Self { + key: blob.key, + data: base64_simd::STANDARD + .decode_to_vec(blob.data) + .map_err(|e| UniffiError::Base64DecodingError(e.to_string()))?, + mime: blob.mime, + }) + } +} + +#[derive(uniffi::Record)] +pub struct ListedBlob { + pub key: String, + pub size: i64, + pub mime: String, + pub created_at: SystemTime, +} + +impl From for ListedBlob { + fn from(blob: affine_nbstore::ListedBlob) -> Self { + Self { + key: blob.key, + size: blob.size, + mime: blob.mime, + created_at: blob.created_at.and_utc().into(), + } + } +} + +#[derive(uniffi::Object)] +pub struct DocStorage { + storage: storage::SqliteDocStorage, +} + +#[uniffi::export] +impl DocStorage { + #[uniffi::constructor] + pub fn new(path: String) -> Result { + if path.is_empty() { + return Err(UniffiError::EmptyDocStoragePath); + } + Ok(Self { + storage: storage::SqliteDocStorage::new(path), + }) + } + + /// Initialize the database and run migrations. + pub async fn connect(&self) -> Result<(), UniffiError> { + Ok(self.storage.connect().await?) + } + + pub async fn close(&self) -> Result<(), UniffiError> { + Ok(self.storage.close().await) + } + + pub fn is_closed(&self) -> bool { + self.storage.is_closed() + } + + pub async fn checkpoint(&self) -> Result<(), UniffiError> { + Ok(self.storage.checkpoint().await?) + } + + pub async fn validate(&self) -> Result { + Ok(self.storage.validate().await?) + } + + pub async fn set_space_id(&self, space_id: String) -> Result<(), UniffiError> { + if space_id.is_empty() { + return Err(UniffiError::EmptySpaceId); + } + Ok(self.storage.set_space_id(space_id).await?) + } + + pub async fn push_update( + &self, + doc_id: String, + update: String, + ) -> Result { + Ok( + self + .storage + .push_update( + doc_id, + base64_simd::STANDARD + .decode_to_vec(update) + .map_err(|e| UniffiError::Base64DecodingError(e.to_string()))?, + ) + .await? + .and_utc() + .into(), + ) + } + + pub async fn get_doc_snapshot(&self, doc_id: String) -> Result, UniffiError> { + Ok(self.storage.get_doc_snapshot(doc_id).await?.map(Into::into)) + } + + pub async fn set_doc_snapshot(&self, snapshot: DocRecord) -> Result { + Ok(self.storage.set_doc_snapshot(snapshot.try_into()?).await?) + } + + pub async fn get_doc_updates(&self, doc_id: String) -> Result, UniffiError> { + Ok( + self + .storage + .get_doc_updates(doc_id) + .await? + .into_iter() + .map(Into::into) + .collect(), + ) + } + + pub async fn mark_updates_merged( + &self, + doc_id: String, + updates: Vec, + ) -> Result { + Ok( + self + .storage + .mark_updates_merged( + doc_id, + updates + .into_iter() + .map(|t| chrono::DateTime::::from(t).naive_utc()) + .collect(), + ) + .await?, + ) + } + + pub async fn delete_doc(&self, doc_id: String) -> Result<(), UniffiError> { + Ok(self.storage.delete_doc(doc_id).await?) + } + + pub async fn get_doc_clocks( + &self, + after: Option, + ) -> Result, UniffiError> { + Ok( + self + .storage + .get_doc_clocks(after.map(|t| chrono::DateTime::::from(t).naive_utc())) + .await? 
+ .into_iter() + .map(Into::into) + .collect(), + ) + } + + pub async fn get_doc_clock(&self, doc_id: String) -> Result, UniffiError> { + Ok(self.storage.get_doc_clock(doc_id).await?.map(Into::into)) + } + + pub async fn get_blob(&self, key: String) -> Result, UniffiError> { + Ok(self.storage.get_blob(key).await?.map(Into::into)) + } + + pub async fn set_blob(&self, blob: SetBlob) -> Result<(), UniffiError> { + Ok(self.storage.set_blob(blob.try_into()?).await?) + } + + pub async fn delete_blob(&self, key: String, permanently: bool) -> Result<(), UniffiError> { + Ok(self.storage.delete_blob(key, permanently).await?) + } + + pub async fn release_blobs(&self) -> Result<(), UniffiError> { + Ok(self.storage.release_blobs().await?) + } + + pub async fn list_blobs(&self) -> Result, UniffiError> { + Ok( + self + .storage + .list_blobs() + .await? + .into_iter() + .map(Into::into) + .collect(), + ) + } + + pub async fn get_peer_remote_clocks(&self, peer: String) -> Result, UniffiError> { + Ok( + self + .storage + .get_peer_remote_clocks(peer) + .await? + .into_iter() + .map(Into::into) + .collect(), + ) + } + + pub async fn get_peer_remote_clock( + &self, + peer: String, + doc_id: String, + ) -> Result { + Ok( + self + .storage + .get_peer_remote_clock(peer, doc_id) + .await? + .into(), + ) + } + + pub async fn set_peer_remote_clock( + &self, + peer: String, + doc_id: String, + clock: SystemTime, + ) -> Result<(), UniffiError> { + Ok( + self + .storage + .set_peer_remote_clock( + peer, + doc_id, + chrono::DateTime::::from(clock).naive_utc(), + ) + .await?, + ) + } + + pub async fn get_peer_pulled_remote_clocks( + &self, + peer: String, + ) -> Result, UniffiError> { + Ok( + self + .storage + .get_peer_pulled_remote_clocks(peer) + .await? + .into_iter() + .map(Into::into) + .collect(), + ) + } + + pub async fn get_peer_pulled_remote_clock( + &self, + peer: String, + doc_id: String, + ) -> Result { + Ok( + self + .storage + .get_peer_pulled_remote_clock(peer, doc_id) + .await? + .into(), + ) + } + + pub async fn set_peer_pulled_remote_clock( + &self, + peer: String, + doc_id: String, + clock: SystemTime, + ) -> Result<(), UniffiError> { + Ok( + self + .storage + .set_peer_pulled_remote_clock( + peer, + doc_id, + chrono::DateTime::::from(clock).naive_utc(), + ) + .await?, + ) + } + + pub async fn get_peer_pushed_clocks(&self, peer: String) -> Result, UniffiError> { + Ok( + self + .storage + .get_peer_pushed_clocks(peer) + .await? + .into_iter() + .map(Into::into) + .collect(), + ) + } + + pub async fn set_peer_pushed_clock( + &self, + peer: String, + doc_id: String, + clock: SystemTime, + ) -> Result<(), UniffiError> { + Ok( + self + .storage + .set_peer_pushed_clock( + peer, + doc_id, + chrono::DateTime::::from(clock).naive_utc(), + ) + .await?, + ) + } + + pub async fn clear_clocks(&self) -> Result<(), UniffiError> { + Ok(self.storage.clear_clocks().await?) 
+ } +} diff --git a/packages/frontend/native/Cargo.toml b/packages/frontend/native/Cargo.toml index c30c76c88193f..f22a5e105af33 100644 --- a/packages/frontend/native/Cargo.toml +++ b/packages/frontend/native/Cargo.toml @@ -4,19 +4,19 @@ name = "affine_native" version = "0.0.0" [lib] -crate-type = ["rlib", "cdylib"] +crate-type = ["cdylib", "rlib"] [dependencies] -affine_common = { workspace = true } +affine_common = { workspace = true } +affine_nbstore = { path = "./nbstore" } affine_sqlite_v1 = { path = "./sqlite_v1" } -affine_nbstore = { path = "./nbstore" } -napi = { workspace = true } -napi-derive = { workspace = true } -once_cell = { workspace = true } -sqlx = { workspace = true, default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] } -tokio = { workspace = true, features = ["full"] } +napi = { workspace = true } +napi-derive = { workspace = true } +once_cell = { workspace = true } +sqlx = { workspace = true, default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] } +tokio = { workspace = true, features = ["full"] } [build-dependencies] -napi-build = { workspace = true } -sqlx = { workspace = true, default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] } -tokio = { workspace = true, features = ["full"] } +napi-build = { workspace = true } +sqlx = { workspace = true, default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] } +tokio = { workspace = true, features = ["full"] } diff --git a/packages/frontend/native/nbstore/Cargo.toml b/packages/frontend/native/nbstore/Cargo.toml index 99293cada0343..3afb683a64db1 100644 --- a/packages/frontend/native/nbstore/Cargo.toml +++ b/packages/frontend/native/nbstore/Cargo.toml @@ -4,7 +4,10 @@ name = "affine_nbstore" version = "0.0.0" [lib] -crate-type = ["rlib", "cdylib"] +crate-type = ["cdylib", "rlib"] + +[features] +noop = ["napi/noop", "napi-derive/noop"] [dependencies] affine_schema = { path = "../schema" } @@ -15,9 +18,12 @@ napi-derive = { workspace = true } sqlx = { workspace = true, default-features = false, features = ["chrono", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] } tokio = { workspace = true, features = ["full"] } +[target.'cfg(any(target_os = "ios", target_os = "android"))'.dependencies] +uniffi = { workspace = true } + [build-dependencies] affine_schema = { path = "../schema" } -dotenvy = { workspace = true } +dotenvy = { workspace = true } napi-build = { workspace = true } sqlx = { workspace = true, default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] } -tokio = { workspace = true, features = ["full"] } \ No newline at end of file +tokio = { workspace = true, features = ["full"] } diff --git a/packages/frontend/native/nbstore/src/blob.rs b/packages/frontend/native/nbstore/src/blob.rs index 05e00d26e44c6..17996168ae96d 100644 --- a/packages/frontend/native/nbstore/src/blob.rs +++ b/packages/frontend/native/nbstore/src/blob.rs @@ -1,3 +1,5 @@ +use std::ops::Deref; + use super::{storage::SqliteDocStorage, Blob, ListedBlob, SetBlob}; type Result = std::result::Result; @@ -22,7 +24,7 @@ impl SqliteDocStorage { DO UPDATE SET data=$2, mime=$3, size=$4, deleted_at=NULL;"#, ) .bind(blob.key) - .bind(blob.data.as_ref()) + .bind(blob.data.deref()) .bind(blob.mime) .bind(blob.data.len() as i64) .execute(&self.pool) @@ -67,7 
+69,6 @@ impl SqliteDocStorage { #[cfg(test)] mod tests { - use napi::bindgen_prelude::Uint8Array; use sqlx::Row; use super::*; @@ -87,7 +88,7 @@ mod tests { storage .set_blob(SetBlob { key: format!("test_{}", i), - data: Uint8Array::from(vec![0, 0]), + data: vec![0, 0].into(), mime: "text/plain".to_string(), }) .await @@ -127,7 +128,7 @@ mod tests { storage .set_blob(SetBlob { key: format!("test_{}", i), - data: Uint8Array::from(vec![0, 0]), + data: vec![0, 0].into(), mime: "text/plain".to_string(), }) .await @@ -175,7 +176,7 @@ mod tests { storage .set_blob(SetBlob { key: format!("test_{}", i), - data: Uint8Array::from(vec![0, 0]), + data: vec![0, 0].into(), mime: "text/plain".to_string(), }) .await diff --git a/packages/frontend/native/nbstore/src/doc.rs b/packages/frontend/native/nbstore/src/doc.rs index d44b972d0fa71..28088da6f1339 100644 --- a/packages/frontend/native/nbstore/src/doc.rs +++ b/packages/frontend/native/nbstore/src/doc.rs @@ -1,3 +1,5 @@ +use std::ops::Deref; + use chrono::NaiveDateTime; use sqlx::{QueryBuilder, Row}; @@ -110,7 +112,7 @@ impl SqliteDocStorage { WHERE updated_at <= $3;"#, ) .bind(snapshot.doc_id) - .bind(snapshot.data.as_ref()) + .bind(snapshot.data.deref()) .bind(snapshot.timestamp) .execute(&self.pool) .await?; @@ -206,7 +208,6 @@ impl SqliteDocStorage { #[cfg(test)] mod tests { use chrono::{DateTime, Utc}; - use napi::bindgen_prelude::Uint8Array; use super::*; @@ -252,7 +253,7 @@ mod tests { storage .set_doc_snapshot(DocRecord { doc_id: "test".to_string(), - data: Uint8Array::from(vec![0, 0]), + data: vec![0, 0].into(), timestamp: Utc::now().naive_utc(), }) .await @@ -331,7 +332,7 @@ mod tests { let snapshot = DocRecord { doc_id: "test".to_string(), - data: Uint8Array::from(vec![0, 0]), + data: vec![0, 0].into(), timestamp: Utc::now().naive_utc(), }; @@ -349,7 +350,7 @@ mod tests { let snapshot = DocRecord { doc_id: "test".to_string(), - data: Uint8Array::from(vec![0, 0]), + data: vec![0, 0].into(), timestamp: Utc::now().naive_utc(), }; @@ -362,7 +363,7 @@ mod tests { let snapshot = DocRecord { doc_id: "test".to_string(), - data: Uint8Array::from(vec![0, 1]), + data: vec![0, 1].into(), timestamp: DateTime::from_timestamp_millis(Utc::now().timestamp_millis() - 1000) .unwrap() .naive_utc(), diff --git a/packages/frontend/native/nbstore/src/lib.rs b/packages/frontend/native/nbstore/src/lib.rs index 4b6e088b3ab8d..ec4968a70f8f3 100644 --- a/packages/frontend/native/nbstore/src/lib.rs +++ b/packages/frontend/native/nbstore/src/lib.rs @@ -1,27 +1,47 @@ -mod blob; -mod doc; -mod storage; -mod sync; +pub mod blob; +pub mod doc; +pub mod storage; +pub mod sync; use chrono::NaiveDateTime; use napi::bindgen_prelude::*; use napi_derive::napi; -fn map_err(err: sqlx::Error) -> napi::Error { - napi::Error::from(anyhow::Error::from(err)) +#[cfg(feature = "noop")] +type Result = anyhow::Result; + +#[cfg(not(feature = "noop"))] +type Result = napi::Result; + +#[cfg(not(feature = "noop"))] +fn map_err(err: sqlx::Error) -> Error { + Error::from(anyhow::Error::from(err)) +} + +#[cfg(feature = "noop")] +fn map_err(err: sqlx::Error) -> anyhow::Error { + anyhow::Error::from(err) } +#[cfg(feature = "noop")] +pub type Data = Vec; + +#[cfg(not(feature = "noop"))] +pub type Data = Uint8Array; + #[napi(object)] pub struct DocUpdate { pub doc_id: String, pub created_at: NaiveDateTime, - pub data: Uint8Array, + #[napi(ts_type = "Uint8Array")] + pub data: Data, } #[napi(object)] pub struct DocRecord { pub doc_id: String, - pub data: Uint8Array, + #[napi(ts_type = "Uint8Array")] + 
pub data: Data, pub timestamp: NaiveDateTime, } @@ -35,14 +55,16 @@ pub struct DocClock { #[napi(object)] pub struct SetBlob { pub key: String, - pub data: Uint8Array, + #[napi(ts_type = "Uint8Array")] + pub data: Data, pub mime: String, } #[napi(object)] pub struct Blob { pub key: String, - pub data: Uint8Array, + #[napi(ts_type = "Uint8Array")] + pub data: Data, pub mime: String, pub size: i64, pub created_at: NaiveDateTime, @@ -64,7 +86,7 @@ pub struct DocStorage { #[napi] impl DocStorage { #[napi(constructor, async_runtime)] - pub fn new(path: String) -> napi::Result { + pub fn new(path: String) -> Result { Ok(Self { storage: storage::SqliteDocStorage::new(path), }) @@ -72,19 +94,19 @@ impl DocStorage { #[napi] /// Initialize the database and run migrations. - pub async fn connect(&self) -> napi::Result<()> { + pub async fn connect(&self) -> Result<()> { self.storage.connect().await.map_err(map_err) } #[napi] - pub async fn close(&self) -> napi::Result<()> { + pub async fn close(&self) -> Result<()> { self.storage.close().await; Ok(()) } #[napi(getter)] - pub async fn is_closed(&self) -> napi::Result { + pub async fn is_closed(&self) -> Result { Ok(self.storage.is_closed()) } @@ -93,26 +115,22 @@ impl DocStorage { * See https://www.sqlite.org/pragma.html#pragma_wal_checkpoint:~:text=PRAGMA%20schema.wal_checkpoint%3B */ #[napi] - pub async fn checkpoint(&self) -> napi::Result<()> { + pub async fn checkpoint(&self) -> Result<()> { self.storage.checkpoint().await.map_err(map_err) } #[napi] - pub async fn validate(&self) -> napi::Result { + pub async fn validate(&self) -> Result { self.storage.validate().await.map_err(map_err) } #[napi] - pub async fn set_space_id(&self, space_id: String) -> napi::Result<()> { + pub async fn set_space_id(&self, space_id: String) -> Result<()> { self.storage.set_space_id(space_id).await.map_err(map_err) } #[napi] - pub async fn push_update( - &self, - doc_id: String, - update: Uint8Array, - ) -> napi::Result { + pub async fn push_update(&self, doc_id: String, update: Uint8Array) -> Result { self .storage .push_update(doc_id, update) @@ -121,12 +139,12 @@ impl DocStorage { } #[napi] - pub async fn get_doc_snapshot(&self, doc_id: String) -> napi::Result> { + pub async fn get_doc_snapshot(&self, doc_id: String) -> Result> { self.storage.get_doc_snapshot(doc_id).await.map_err(map_err) } #[napi] - pub async fn set_doc_snapshot(&self, snapshot: DocRecord) -> napi::Result { + pub async fn set_doc_snapshot(&self, snapshot: DocRecord) -> Result { self .storage .set_doc_snapshot(snapshot) @@ -135,7 +153,7 @@ impl DocStorage { } #[napi] - pub async fn get_doc_updates(&self, doc_id: String) -> napi::Result> { + pub async fn get_doc_updates(&self, doc_id: String) -> Result> { self.storage.get_doc_updates(doc_id).await.map_err(map_err) } @@ -144,7 +162,7 @@ impl DocStorage { &self, doc_id: String, updates: Vec, - ) -> napi::Result { + ) -> Result { self .storage .mark_updates_merged(doc_id, updates) @@ -153,32 +171,32 @@ impl DocStorage { } #[napi] - pub async fn delete_doc(&self, doc_id: String) -> napi::Result<()> { + pub async fn delete_doc(&self, doc_id: String) -> Result<()> { self.storage.delete_doc(doc_id).await.map_err(map_err) } #[napi] - pub async fn get_doc_clocks(&self, after: Option) -> napi::Result> { + pub async fn get_doc_clocks(&self, after: Option) -> Result> { self.storage.get_doc_clocks(after).await.map_err(map_err) } #[napi] - pub async fn get_doc_clock(&self, doc_id: String) -> napi::Result> { + pub async fn get_doc_clock(&self, doc_id: String) -> 
Result> { self.storage.get_doc_clock(doc_id).await.map_err(map_err) } #[napi] - pub async fn get_blob(&self, key: String) -> napi::Result> { + pub async fn get_blob(&self, key: String) -> Result> { self.storage.get_blob(key).await.map_err(map_err) } #[napi] - pub async fn set_blob(&self, blob: SetBlob) -> napi::Result<()> { + pub async fn set_blob(&self, blob: SetBlob) -> Result<()> { self.storage.set_blob(blob).await.map_err(map_err) } #[napi] - pub async fn delete_blob(&self, key: String, permanently: bool) -> napi::Result<()> { + pub async fn delete_blob(&self, key: String, permanently: bool) -> Result<()> { self .storage .delete_blob(key, permanently) @@ -187,17 +205,17 @@ impl DocStorage { } #[napi] - pub async fn release_blobs(&self) -> napi::Result<()> { + pub async fn release_blobs(&self) -> Result<()> { self.storage.release_blobs().await.map_err(map_err) } #[napi] - pub async fn list_blobs(&self) -> napi::Result> { + pub async fn list_blobs(&self) -> Result> { self.storage.list_blobs().await.map_err(map_err) } #[napi] - pub async fn get_peer_remote_clocks(&self, peer: String) -> napi::Result> { + pub async fn get_peer_remote_clocks(&self, peer: String) -> Result> { self .storage .get_peer_remote_clocks(peer) @@ -206,11 +224,7 @@ impl DocStorage { } #[napi] - pub async fn get_peer_remote_clock( - &self, - peer: String, - doc_id: String, - ) -> napi::Result { + pub async fn get_peer_remote_clock(&self, peer: String, doc_id: String) -> Result { self .storage .get_peer_remote_clock(peer, doc_id) @@ -224,7 +238,7 @@ impl DocStorage { peer: String, doc_id: String, clock: NaiveDateTime, - ) -> napi::Result<()> { + ) -> Result<()> { self .storage .set_peer_remote_clock(peer, doc_id, clock) @@ -233,7 +247,7 @@ impl DocStorage { } #[napi] - pub async fn get_peer_pulled_remote_clocks(&self, peer: String) -> napi::Result> { + pub async fn get_peer_pulled_remote_clocks(&self, peer: String) -> Result> { self .storage .get_peer_pulled_remote_clocks(peer) @@ -246,7 +260,7 @@ impl DocStorage { &self, peer: String, doc_id: String, - ) -> napi::Result { + ) -> Result { self .storage .get_peer_pulled_remote_clock(peer, doc_id) @@ -260,7 +274,7 @@ impl DocStorage { peer: String, doc_id: String, clock: NaiveDateTime, - ) -> napi::Result<()> { + ) -> Result<()> { self .storage .set_peer_pulled_remote_clock(peer, doc_id, clock) @@ -269,7 +283,7 @@ impl DocStorage { } #[napi] - pub async fn get_peer_pushed_clocks(&self, peer: String) -> napi::Result> { + pub async fn get_peer_pushed_clocks(&self, peer: String) -> Result> { self .storage .get_peer_pushed_clocks(peer) @@ -278,11 +292,7 @@ impl DocStorage { } #[napi] - pub async fn get_peer_pushed_clock( - &self, - peer: String, - doc_id: String, - ) -> napi::Result { + pub async fn get_peer_pushed_clock(&self, peer: String, doc_id: String) -> Result { self .storage .get_peer_pushed_clock(peer, doc_id) @@ -296,7 +306,7 @@ impl DocStorage { peer: String, doc_id: String, clock: NaiveDateTime, - ) -> napi::Result<()> { + ) -> Result<()> { self .storage .set_peer_pushed_clock(peer, doc_id, clock) @@ -305,7 +315,7 @@ impl DocStorage { } #[napi] - pub async fn clear_clocks(&self) -> napi::Result<()> { + pub async fn clear_clocks(&self) -> Result<()> { self.storage.clear_clocks().await.map_err(map_err) } } diff --git a/packages/frontend/native/package.json b/packages/frontend/native/package.json index e0ebe3ddc7bcd..6c454e43bdba2 100644 --- a/packages/frontend/native/package.json +++ b/packages/frontend/native/package.json @@ -44,8 +44,8 @@ }, "scripts": { 
"artifacts": "napi artifacts", - "build": "napi build --platform --release --no-const-enum", - "build:debug": "napi build --platform", + "build": "napi build -p affine_native --platform --release --no-const-enum", + "build:debug": "napi build -p affine_native --platform", "universal": "napi universal", "test": "ava", "version": "napi version" diff --git a/packages/frontend/native/schema/Cargo.toml b/packages/frontend/native/schema/Cargo.toml index 79e53049dffe9..2180e23d44583 100644 --- a/packages/frontend/native/schema/Cargo.toml +++ b/packages/frontend/native/schema/Cargo.toml @@ -4,4 +4,4 @@ name = "affine_schema" version = "0.0.0" [dependencies] -sqlx = { workspace = true, default-features = false, features = ["migrate"] } \ No newline at end of file +sqlx = { workspace = true, default-features = false, features = ["migrate"] } diff --git a/packages/frontend/native/sqlite_v1/Cargo.toml b/packages/frontend/native/sqlite_v1/Cargo.toml index 3dde2e558c3bf..b6d1cd6555a8b 100644 --- a/packages/frontend/native/sqlite_v1/Cargo.toml +++ b/packages/frontend/native/sqlite_v1/Cargo.toml @@ -4,7 +4,7 @@ name = "affine_sqlite_v1" version = "0.0.0" [lib] -crate-type = ["rlib", "cdylib"] +crate-type = ["cdylib", "rlib"] [dependencies] affine_schema = { path = "../schema" } @@ -17,7 +17,7 @@ tokio = { workspace = true, features = ["full"] } [build-dependencies] affine_schema = { path = "../schema" } -dotenvy = { workspace = true } +dotenvy = { workspace = true } napi-build = { workspace = true } sqlx = { workspace = true, default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio", "sqlite", "tls-rustls"] } tokio = { workspace = true, features = ["full"] } diff --git a/tools/commitlint/.commitlintrc.json b/tools/commitlint/.commitlintrc.json index 35de033e6ff06..ac82f3ece66db 100644 --- a/tools/commitlint/.commitlintrc.json +++ b/tools/commitlint/.commitlintrc.json @@ -13,6 +13,7 @@ "mobile", "ios", "android", + "mobile-native", "docs", "component", "env", diff --git a/yarn.lock b/yarn.lock index 857b9eb226b8a..f011b84cb272d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -577,6 +577,7 @@ __metadata: "@affine/component": "workspace:*" "@affine/core": "workspace:*" "@affine/i18n": "workspace:*" + "@affine/native": "workspace:*" "@blocksuite/affine": "workspace:*" "@blocksuite/icons": "npm:2.1.75" "@capacitor/app": "npm:^6.0.2" @@ -595,6 +596,7 @@ __metadata: react-dom: "npm:^19.0.0" react-router-dom: "npm:^6.28.0" typescript: "npm:^5.7.2" + yjs: "npm:13.6.18" languageName: unknown linkType: soft