From 3969823ff4f02de3b96152dae7f39584e30a10cd Mon Sep 17 00:00:00 2001 From: Oleg Chendighelean Date: Tue, 17 Dec 2024 14:51:18 +0000 Subject: [PATCH] Refine backupData --- apps/desktop/public/electron.js | 102 ++++++++++++++++++------------- packages/state/src/global.d.ts | 9 +++ packages/state/src/migrations.ts | 14 ----- packages/state/src/reducer.ts | 48 ++++++--------- 4 files changed, 86 insertions(+), 87 deletions(-) create mode 100644 packages/state/src/global.d.ts diff --git a/apps/desktop/public/electron.js b/apps/desktop/public/electron.js index df80aac10..0a8cf0cb2 100644 --- a/apps/desktop/public/electron.js +++ b/apps/desktop/public/electron.js @@ -10,9 +10,10 @@ const fs = require("fs"); const APP_PROTOCOL = "app"; const APP_HOST = "assets"; -// create in memory store of the leveldb database of previous version which had file:// protocol +// backupData is used to store the backup data from the previous version of the app let backupData; + const appURL = app.isPackaged ? url.format({ pathname: `${APP_HOST}/index.html`, @@ -35,17 +36,20 @@ protocol.registerSchemesAsPrivileged([ // Configure electron-log log.transports.file.file = path.join(app.getPath("userData"), "Local Storage", "umami-desktop.log"); -async function readAndCopyValues() { - // Path to the LevelDB database +async function createBackupFromPrevDB() { const dbPath = path.join(app.getPath("userData"), "Local Storage", "leveldb"); + const backupPath = path.join(app.getPath("userData"), "Local Storage", "backup_leveldb.json"); + + if (fs.existsSync(backupPath)) { + console.log("Backup file already exists. Skipping migration."); + return; + } - // Check if the LevelDB database exists if (!fs.existsSync(dbPath)) { log.info("LevelDB database not found at path. 
Code:EM01", dbPath); return; } - // Open the LevelDB database const db = new Level(dbPath); await db.open(); @@ -54,54 +58,70 @@ async function readAndCopyValues() { // Function to clean up the string (removing non-printable chars) function cleanString(str) { - // Remove non-printable characters like \x00 and \x01 - str = str.replace(/[\x00\x01\x17\x10\x0f]/g, ""); // Removing some common control chars + str = str.replace(/[\x00\x01\x17\x10\x0f]/g, ""); - // Optionally, you could try Base64 decoding here if you're suspecting such encoding - // Example: if(str.includes("base64")) { str = Buffer.from(str, 'base64').toString(); } return str; } - // Function to check if a string is valid JSON - function isValidJSON(str) { - try { - JSON.parse(str); - return true; - } catch (e) { - return false; + const KEYS_TO_MIGRATE = ["_file://\x00\x01persist:accounts", "_file://\x00\x01persist:root"]; + const ROOT_KEYS_TO_MIGRATE = [ + "batches", + "beacon", + "networks", + "contacts", + "errors", + "protocolSettings", + "_persist", + ]; + + const extractKeys = json => { + const regexp = /"([^"]+)":("[^"\\]*(?:\\.[^"\\]*)*"|{[^}]+})/g; + + const result = {}; + const matches = json.matchAll(regexp); + + for (const [_, key, value] of matches) { + if (ROOT_KEYS_TO_MIGRATE.includes(key)) { + try { + // Try to parse the value if it's a valid JSON + result[key] = JSON.parse(value); + } catch { + // If parsing fails, store as raw string + result[key] = value.replace(/^"|"$/g, ""); + } + } } - } - for await (const [key, value] of db.iterator()) { - if ( - !key.includes("_file://\x00\x01persist:accounts") || - !key.includes("_file://\x00\x01persist:root") - ) { - continue; - } + return result; + }; - // Clean the value string before storing - let cleanedValue = cleanString(value); + for await (const [_key, value] of db.iterator()) { + if (KEYS_TO_MIGRATE.includes(_key)) { + let cleanedValue = cleanString(value); + + const key = _key.includes("_file://\x00\x01persist:root") + ? 
"persist:root" + : "persist:accounts"; - // Try parsing the cleaned string as JSON - if (isValidJSON(cleanedValue)) { try { - storage[key.includes("accounts") ? "persist:accounts" : "persist:root"] = - JSON.parse(cleanedValue); - } catch (error) { - console.error(`Error parsing JSON for key: ${key}, value: ${cleanedValue}`); - storage[key] = cleanedValue; // Store as raw value if JSON parsing fails + storage[key] = JSON.parse(cleanedValue); + } catch (_) { + // Store as raw value if JSON parsing fails + storage[key] = cleanedValue; } - } else { - // If not valid JSON, store the raw cleaned string - storage[key] = cleanedValue; } } + const preparedStorage = { + ...storage, + "persist:root": extractKeys(storage["persist:root"]), + }; + + backupData = preparedStorage; + // Write storage object to JSON file - const backupPath = path.join(app.getPath("userData"), "Local Storage", "backup_leveldb.json"); try { - fs.writeFileSync(backupPath, JSON.stringify(storage, null, 2), "utf-8"); + fs.writeFileSync(backupPath, JSON.stringify(preparedStorage, null, 2), "utf-8"); log.info("Backup successfully created at:", backupPath); } catch (err) { log.error("Error during LevelDB backup creation. Code:EM2.", err); @@ -109,7 +129,6 @@ async function readAndCopyValues() { } catch (err) { log.error("Error during key migration. Code:EM4.", err); } finally { - // Close the database db.close().catch(err => { log.error("Error closing the database. Code:EM5", err); }); @@ -264,7 +283,6 @@ function start() { app.quit(); return; } - let waitForMigration = true; // Check for app updates, download and notify UI if update is available to be installed. try { @@ -320,8 +338,8 @@ function start() { // is ready to create the browser windows. // Some APIs can only be used after this event occurs. 
app.whenReady().then(async () => { - // Execute readAndCopyValues at the beginning - await readAndCopyValues(); + // Execute createBackupFromPrevDB at the beginning + await createBackupFromPrevDB(); createWindow(); }); diff --git a/packages/state/src/global.d.ts b/packages/state/src/global.d.ts new file mode 100644 index 000000000..60c66fe14 --- /dev/null +++ b/packages/state/src/global.d.ts @@ -0,0 +1,9 @@ +export {}; + +declare global { + interface Window { + electronAPI?: { + onBackupData: (fn: (event: any, data?: Record<string, unknown>) => void) => void; + }; + } +} diff --git a/packages/state/src/migrations.ts b/packages/state/src/migrations.ts index 8c64a1bfa..e563a456d 100644 --- a/packages/state/src/migrations.ts +++ b/packages/state/src/migrations.ts @@ -5,20 +5,6 @@ import { fromPairs, identity } from "lodash"; import { announcementInitialState as announcementsInitialState } from "./slices/announcement"; -function handleBackupData(callback: (data: any) => void) { - // @ts-ignore - // window.electronAPI.triggerBackupData(); - // @ts-ignore - localStorage.setItem("test", "true"); - // @ts-ignore - window.electronAPI.onBackupData(backupData => { - console.log(backupData); - localStorage.setItem("backup-received", "true"); - localStorage.setItem("backupData", backupData); - callback(backupData); - }); -} - export const VERSION = 9; export const mainStoreMigrations = { diff --git a/packages/state/src/reducer.ts b/packages/state/src/reducer.ts index cb5416c42..b10027a67 100644 --- a/packages/state/src/reducer.ts +++ b/packages/state/src/reducer.ts @@ -41,32 +41,23 @@ const getTestStorage = () => { export const processMigrationData = (backupData: any) => { try { const processedData: { accounts: any; root: any } = { - accounts: null, - root: null, + accounts: {}, + root: {}, }; - console.log(backupData, "backupData"); + if (backupData["persist:accounts"]) { + const accounts = backupData["persist:accounts"]; - if (backupData["persist:accounts"]?.accountsValue) { - const 
accountsValue = backupData["persist:accounts"].accountsValue.slice(1); - processedData.accounts = JSON.parse(accountsValue); - - for (const item in processedData.accounts) { - processedData.accounts[item] = JSON.parse(processedData.accounts[item]); + for (const item in accounts) { + processedData.accounts[item] = JSON.parse(accounts[item]); } } - if (backupData["persist:root"]?.rootValue) { - const sanitizedRootValue = backupData["persist:root"].rootValue.replaceAll( - // eslint-disable-next-line no-control-regex - /[\u0000-\u001F\u007F-\u009F]/g, - "" - ); - - processedData.root = JSON.parse(sanitizedRootValue); + if (backupData["persist:root"]) { + const root = backupData["persist:root"]; - for (const item in processedData.root) { - processedData.root[item] = JSON.parse(processedData.root[item]); + for (const item in root) { + processedData.root[item] = JSON.parse(root[item]); } } @@ -80,29 +71,24 @@ export const processMigrationData = (backupData: any) => { export const makeReducer = (storage_: Storage | undefined) => { const storage = storage_ || getTestStorage() || createWebStorage("local"); - // Custom getStoredState function to handle migration + // Custom getStoredState function to handle migration from desktop v2.3.3 to v2.3.4 const customGetStoredState = async (config: PersistConfig): Promise => { try { - // First try to get state from current storage const state = (await getStoredState(config)) as PersistedState; - console.log(state, "state"); + if (state) { return state; } - // If no state, check if we have backup data - // @ts-ignore + // If no state, check if we have backup data and migrate it to the new state if (window.electronAPI) { return new Promise(resolve => { - // @ts-ignore - window.electronAPI.onBackupData((_, data) => { + window.electronAPI?.onBackupData((_, data) => { if (data) { const processed = processMigrationData(data); - console.log(processed, "processed"); + if (processed) { - // Return the processed state based on config key - // 
@ts-ignore - return resolve(config.key === "root" ? processed.root : processed.accounts); + return resolve(processed[config.key as keyof typeof processed]); } } resolve(undefined); @@ -111,7 +97,7 @@ export const makeReducer = (storage_: Storage | undefined) => { } } catch (err) { console.error("Error getting stored state:", err); - return undefined; + return; } };