From 0b626d4684cab449eddaac1819d9a71377b1b7ac Mon Sep 17 00:00:00 2001 From: Joel Jeremy Marquez Date: Sat, 2 Dec 2023 12:31:10 -0800 Subject: [PATCH] Another round of prefer-const rule updates --- .eslintrc.js | 2 +- .../src/server/accounts/export-to-csv.ts | 10 +- .../loot-core/src/server/accounts/link.ts | 18 +- .../src/server/accounts/parse-file.test.ts | 19 +- .../src/server/accounts/parse-file.ts | 24 +-- .../loot-core/src/server/accounts/payees.ts | 4 +- .../loot-core/src/server/accounts/qif2json.ts | 12 +- .../src/server/accounts/rules.test.ts | 44 ++-- .../loot-core/src/server/accounts/rules.ts | 78 +++---- .../src/server/accounts/sync.test.ts | 86 ++++---- .../loot-core/src/server/accounts/sync.ts | 138 ++++++------ .../src/server/accounts/title/index.ts | 6 +- .../server/accounts/transaction-rules.test.ts | 41 ++-- .../src/server/accounts/transaction-rules.ts | 122 +++++------ .../src/server/accounts/transactions.ts | 42 ++-- .../src/server/accounts/transfer.test.ts | 12 +- .../loot-core/src/server/accounts/transfer.ts | 12 +- packages/loot-core/src/server/api-models.ts | 6 +- packages/loot-core/src/server/api.ts | 68 +++--- packages/loot-core/src/server/app.ts | 6 +- .../loot-core/src/server/aql/compiler.test.ts | 16 +- packages/loot-core/src/server/aql/compiler.ts | 196 +++++++++-------- .../loot-core/src/server/aql/exec.test.ts | 30 +-- packages/loot-core/src/server/aql/exec.ts | 18 +- .../src/server/aql/schema-helpers.test.ts | 12 +- .../src/server/aql/schema-helpers.ts | 28 +-- .../src/server/aql/schema/executors.test.ts | 75 +++---- .../src/server/aql/schema/executors.ts | 34 +-- .../loot-core/src/server/aql/schema/index.ts | 14 +- .../loot-core/src/server/aql/views.test.ts | 6 +- packages/loot-core/src/server/aql/views.ts | 20 +- packages/loot-core/src/server/backups.ts | 14 +- .../loot-core/src/server/budget/actions.ts | 78 +++---- packages/loot-core/src/server/budget/app.ts | 2 +- .../loot-core/src/server/budget/base.test.ts | 2 +- packages/loot-core/src/server/budget/base.ts | 108 +++++----- .../src/server/budget/cleanup-template.ts | 68 +++--- .../src/server/budget/goals/goalsBy.ts | 5 +- .../server/budget/goals/goalsPercentage.ts | 10 +- .../src/server/budget/goals/goalsRemainder.ts | 6 +- .../src/server/budget/goals/goalsSchedule.ts | 28 +-- .../src/server/budget/goals/goalsSimple.ts | 2 +- .../src/server/budget/goals/goalsSpend.ts | 16 +- .../src/server/budget/goals/goalsWeek.ts | 6 +- .../src/server/budget/goaltemplates.ts | 96 +++++---- .../loot-core/src/server/budget/report.ts | 4 +- .../loot-core/src/server/budget/rollover.ts | 10 +- packages/loot-core/src/server/budget/util.ts | 2 +- .../loot-core/src/server/cloud-storage.ts | 67 +++--- packages/loot-core/src/server/db/index.ts | 44 ++-- packages/loot-core/src/server/db/mappings.ts | 6 +- packages/loot-core/src/server/db/sort.ts | 2 +- packages/loot-core/src/server/db/util.ts | 16 +- .../src/server/encryption-internals.ts | 27 +-- .../src/server/encryption-internals.web.ts | 28 +-- .../loot-core/src/server/encryption.test.ts | 6 +- packages/loot-core/src/server/encryption.ts | 2 +- packages/loot-core/src/server/filters/app.ts | 20 +- .../loot-core/src/server/importers/actual.ts | 2 +- .../loot-core/src/server/importers/index.ts | 4 +- .../loot-core/src/server/importers/ynab4.ts | 56 ++--- .../loot-core/src/server/importers/ynab5.ts | 43 ++-- packages/loot-core/src/server/main.test.ts | 31 +-- packages/loot-core/src/server/main.ts | 202 ++++++++++-------- .../src/server/migrate/migrations.test.ts | 10 +- 
.../src/server/migrate/migrations.ts | 20 +- packages/loot-core/src/server/models.ts | 8 +- packages/loot-core/src/server/mutators.ts | 8 +- packages/loot-core/src/server/notes/app.ts | 2 +- packages/loot-core/src/server/post.ts | 4 +- packages/loot-core/src/server/prefs.ts | 4 +- packages/loot-core/src/server/rules/app.ts | 20 +- .../src/server/schedules/app.test.ts | 24 +-- .../loot-core/src/server/schedules/app.ts | 104 ++++----- .../src/server/schedules/find-schedules.ts | 72 ++++--- .../loot-core/src/server/server-config.ts | 2 +- packages/loot-core/src/server/sheet.test.ts | 4 +- packages/loot-core/src/server/sheet.ts | 38 ++-- .../spreadsheet/graph-data-structure.ts | 12 +- .../server/spreadsheet/spreadsheet.test.ts | 2 +- .../src/server/spreadsheet/spreadsheet.ts | 44 ++-- .../loot-core/src/server/spreadsheet/util.ts | 2 +- packages/loot-core/src/server/sync/encoder.ts | 32 +-- packages/loot-core/src/server/sync/index.ts | 88 ++++---- .../src/server/sync/make-test-message.ts | 4 +- .../loot-core/src/server/sync/migrate.test.ts | 38 ++-- packages/loot-core/src/server/sync/migrate.ts | 4 +- packages/loot-core/src/server/sync/repair.ts | 6 +- packages/loot-core/src/server/sync/reset.ts | 12 +- .../src/server/sync/sync.property.test.ts | 46 ++-- .../loot-core/src/server/sync/sync.test.ts | 24 +-- .../src/server/tests/mockSyncServer.ts | 18 +- packages/loot-core/src/server/tools/app.ts | 14 +- packages/loot-core/src/server/undo.ts | 44 ++-- packages/loot-core/src/server/update.ts | 10 +- .../loot-core/src/server/util/budget-name.ts | 2 +- 96 files changed, 1506 insertions(+), 1428 deletions(-) diff --git a/.eslintrc.js b/.eslintrc.js index b2d03eb5b43..ecf6b4dd9ba 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -252,7 +252,7 @@ module.exports = { './packages/loot-core/src/client/**/*', './packages/loot-core/src/mocks/**/*', './packages/loot-core/src/platform/**/*', - // './packages/loot-core/src/server/**/*', + './packages/loot-core/src/server/**/*', './packages/loot-core/src/shared/**/*', './packages/loot-core/src/types/**/*', './packages/loot-core/webpack/**/*', diff --git a/packages/loot-core/src/server/accounts/export-to-csv.ts b/packages/loot-core/src/server/accounts/export-to-csv.ts index 20cc5978483..da8aa17d621 100644 --- a/packages/loot-core/src/server/accounts/export-to-csv.ts +++ b/packages/loot-core/src/server/accounts/export-to-csv.ts @@ -45,7 +45,7 @@ export async function exportToCSV( } export async function exportQueryToCSV(query) { - let { data: transactions } = await aqlQuery( + const { data: transactions } = await aqlQuery( query .select([ { Id: 'id' }, @@ -61,18 +61,18 @@ export async function exportQueryToCSV(query) { .options({ splits: 'all' }), ); - let parentsPayees = new Map(); - for (let trans of transactions) { + const parentsPayees = new Map(); + for (const trans of transactions) { if (trans.IsParent) { parentsPayees.set(trans.Id, trans.Payee); } } // filter out any parent transactions - let noParents = transactions.filter(t => !t.IsParent); + const noParents = transactions.filter(t => !t.IsParent); // map final properties for export and grab the payee for splits from their parent transaction - let transactionsForExport = noParents.map(trans => { + const transactionsForExport = noParents.map(trans => { return { Account: trans.Account, Date: trans.Date, diff --git a/packages/loot-core/src/server/accounts/link.ts b/packages/loot-core/src/server/accounts/link.ts index 99229977f14..891f03a315a 100644 --- a/packages/loot-core/src/server/accounts/link.ts +++ 
b/packages/loot-core/src/server/accounts/link.ts @@ -10,7 +10,7 @@ import { getServer } from '../server-config'; import * as bankSync from './sync'; export async function handoffPublicToken(institution, publicToken) { - let [[, userId], [, key]] = await asyncStorage.multiGet([ + const [[, userId], [, key]] = await asyncStorage.multiGet([ 'user-id', 'user-key', ]); @@ -19,7 +19,7 @@ export async function handoffPublicToken(institution, publicToken) { throw new Error('Invalid institution object'); } - let id = uuidv4(); + const id = uuidv4(); // Make sure to generate an access token first before inserting it // into our local database in case it fails @@ -42,7 +42,7 @@ export async function handoffPublicToken(institution, publicToken) { } export async function findOrCreateBank(institution, requisitionId) { - let bank = await db.first( + const bank = await db.first( 'SELECT id, bank_id, name FROM banks WHERE bank_id = ?', [requisitionId], ); @@ -63,7 +63,7 @@ export async function findOrCreateBank(institution, requisitionId) { } export async function addAccounts(bankId, accountIds, offbudgetIds = []) { - let [[, userId], [, userKey]] = await asyncStorage.multiGet([ + const [[, userId], [, userKey]] = await asyncStorage.multiGet([ 'user-id', 'user-key', ]); @@ -76,8 +76,8 @@ export async function addAccounts(bankId, accountIds, offbudgetIds = []) { return Promise.all( accounts.map(async acct => { - let id = await runMutator(async () => { - let id = await db.insertAccount({ + const id = await runMutator(async () => { + const id = await db.insertAccount({ account_id: acct.account_id, name: acct.name, official_name: acct.official_name, @@ -109,7 +109,7 @@ export async function addGoCardlessAccounts( accountIds, offbudgetIds = [], ) { - let [[, userId], [, userKey]] = await asyncStorage.multiGet([ + const [[, userId], [, userKey]] = await asyncStorage.multiGet([ 'user-id', 'user-key', ]); @@ -122,8 +122,8 @@ export async function addGoCardlessAccounts( return Promise.all( accounts.map(async acct => { - let id = await runMutator(async () => { - let id = await db.insertAccount({ + const id = await runMutator(async () => { + const id = await db.insertAccount({ account_id: acct.account_id, name: acct.name, official_name: acct.official_name, diff --git a/packages/loot-core/src/server/accounts/parse-file.test.ts b/packages/loot-core/src/server/accounts/parse-file.test.ts index 7c03544d531..57c347319a5 100644 --- a/packages/loot-core/src/server/accounts/parse-file.test.ts +++ b/packages/loot-core/src/server/accounts/parse-file.test.ts @@ -12,7 +12,7 @@ beforeEach(global.emptyDatabase()); // libofx spits out errors that contain the entire // source code of the file in the stack which makes // it hard to test. -let old = console.warn; +const old = console.warn; beforeAll(() => { console.warn = () => {}; }); @@ -35,11 +35,12 @@ async function importFileWithRealTime( ) { // Emscripten requires a real Date.now! 
global.restoreDateNow(); - let { errors, transactions } = await parseFile(filepath, { + const parseFileResult = await parseFile(filepath, { enableExperimentalOfxParser: true, }); global.restoreFakeDateNow(); + let transactions = parseFileResult.transactions; if (transactions) { // eslint-disable-next-line @typescript-eslint/no-explicit-any transactions = (transactions as any[]).map(trans => ({ @@ -50,12 +51,12 @@ async function importFileWithRealTime( : trans.date, })); } - + const errors = parseFileResult.errors; if (errors.length > 0) { return { errors, added: [] }; } - let { added } = await reconcileTransactions(accountId, transactions); + const { added } = await reconcileTransactions(accountId, transactions); return { errors, added }; } @@ -63,7 +64,7 @@ describe('File import', () => { test('qif import works', async () => { prefs.loadPrefs(); await db.insertAccount({ id: 'one', name: 'one' }); - let { errors } = await importFileWithRealTime( + const { errors } = await importFileWithRealTime( 'one', __dirname + '/../../mocks/files/data.qif', 'MM/dd/yy', @@ -76,7 +77,7 @@ describe('File import', () => { prefs.loadPrefs(); await db.insertAccount({ id: 'one', name: 'one' }); - let { errors } = await importFileWithRealTime( + const { errors } = await importFileWithRealTime( 'one', __dirname + '/../../mocks/files/data.ofx', ); @@ -88,7 +89,7 @@ describe('File import', () => { prefs.loadPrefs(); await db.insertAccount({ id: 'one', name: 'one' }); - let { errors } = await importFileWithRealTime( + const { errors } = await importFileWithRealTime( 'one', __dirname + '/../../mocks/files/credit-card.ofx', ); @@ -100,7 +101,7 @@ describe('File import', () => { prefs.loadPrefs(); await db.insertAccount({ id: 'one', name: 'one' }); - let { errors } = await importFileWithRealTime( + const { errors } = await importFileWithRealTime( 'one', __dirname + '/../../mocks/files/data.qfx', ); @@ -134,7 +135,7 @@ describe('File import', () => { prefs.loadPrefs(); await db.insertAccount({ id: 'one', name: 'one' }); - let { errors } = await importFileWithRealTime( + const { errors } = await importFileWithRealTime( 'one', __dirname + '/../../mocks/files/8859-1.qfx', 'yyyy-MM-dd', diff --git a/packages/loot-core/src/server/accounts/parse-file.ts b/packages/loot-core/src/server/accounts/parse-file.ts index 67c2430765d..f81e50b87b8 100644 --- a/packages/loot-core/src/server/accounts/parse-file.ts +++ b/packages/loot-core/src/server/accounts/parse-file.ts @@ -24,11 +24,11 @@ export async function parseFile( filepath: string, options?: ParseFileOptions, ): Promise { - let errors = Array(); - let m = filepath.match(/\.[^.]*$/); + const errors = Array(); + const m = filepath.match(/\.[^.]*$/); if (m) { - let ext = m[0]; + const ext = m[0]; switch (ext.toLowerCase()) { case '.qif': @@ -54,8 +54,8 @@ async function parseCSV( filepath: string, options?: ParseFileOptions, ): Promise { - let errors = Array(); - let contents = await fs.readFile(filepath); + const errors = Array(); + const contents = await fs.readFile(filepath); let data; try { @@ -81,8 +81,8 @@ async function parseCSV( } async function parseQIF(filepath: string): Promise { - let errors = Array(); - let contents = await fs.readFile(filepath); + const errors = Array(); + const contents = await fs.readFile(filepath); let data; try { @@ -131,7 +131,7 @@ async function parseOFX( // Banks don't always implement the OFX standard properly // If no payee is available try and fallback to memo - let useMemoFallback = options.fallbackMissingPayeeToMemo; + const 
useMemoFallback = options.fallbackMissingPayeeToMemo; return { errors, @@ -152,13 +152,13 @@ async function parseOFXNodeLibOFX( filepath: string, options: ParseFileOptions, ): Promise { - let { getOFXTransactions, initModule } = await import( + const { getOFXTransactions, initModule } = await import( /* webpackChunkName: 'xfo' */ 'node-libofx' ); await initModule(); - let errors = Array(); - let contents = await fs.readFile(filepath, 'binary'); + const errors = Array(); + const contents = await fs.readFile(filepath, 'binary'); let data; try { @@ -173,7 +173,7 @@ async function parseOFXNodeLibOFX( // Banks don't always implement the OFX standard properly // If no payee is available try and fallback to memo - let useMemoFallback = options.fallbackMissingPayeeToMemo; + const useMemoFallback = options.fallbackMissingPayeeToMemo; return { errors, diff --git a/packages/loot-core/src/server/accounts/payees.ts b/packages/loot-core/src/server/accounts/payees.ts index de612e0d013..fe334901897 100644 --- a/packages/loot-core/src/server/accounts/payees.ts +++ b/packages/loot-core/src/server/accounts/payees.ts @@ -4,7 +4,7 @@ import * as db from '../db'; export async function createPayee(description) { // Check to make sure no payee already exists with exactly the same // name - let row = await db.first( + const row = await db.first( `SELECT id FROM payees WHERE UNICODE_LOWER(name) = ? AND tombstone = 0`, [description.toLowerCase()], ); @@ -29,7 +29,7 @@ export async function getStartingBalancePayee() { ); } - let id = await createPayee('Starting Balance'); + const id = await createPayee('Starting Balance'); return { id, category: category ? category.id : null, diff --git a/packages/loot-core/src/server/accounts/qif2json.ts b/packages/loot-core/src/server/accounts/qif2json.ts index aff8e4c3d66..b3957c6cdef 100644 --- a/packages/loot-core/src/server/accounts/qif2json.ts +++ b/packages/loot-core/src/server/accounts/qif2json.ts @@ -19,10 +19,10 @@ type QIFTransaction = { }; export default function parse(qif, options: { dateFormat?: string } = {}) { - let lines = qif.split('\n'); + const lines = qif.split('\n'); let line = lines.shift(); - let type = /!Type:([^$]*)$/.exec(line.trim()); - let data: { + const type = /!Type:([^$]*)$/.exec(line.trim()); + const data: { dateFormat: string | undefined; type?; transactions: QIFTransaction[]; @@ -30,7 +30,7 @@ export default function parse(qif, options: { dateFormat?: string } = {}) { dateFormat: options.dateFormat, transactions: [], }; - let transactions = data.transactions; + const transactions = data.transactions; let transaction: QIFTransaction = {}; if (!type || !type.length) { @@ -69,7 +69,7 @@ export default function parse(qif, options: { dateFormat?: string } = {}) { transaction.payee = line.substring(1).replace(/&/g, '&'); break; case 'L': - let lArray = line.substring(1).split(':'); + const lArray = line.substring(1).split(':'); transaction.category = lArray[0]; if (lArray[1] !== undefined) { transaction.subcategory = lArray[1]; @@ -79,7 +79,7 @@ export default function parse(qif, options: { dateFormat?: string } = {}) { transaction.clearedStatus = line.substring(1); break; case 'S': - let sArray = line.substring(1).split(':'); + const sArray = line.substring(1).split(':'); division.category = sArray[0]; if (sArray[1] !== undefined) { division.subcategory = sArray[1]; diff --git a/packages/loot-core/src/server/accounts/rules.test.ts b/packages/loot-core/src/server/accounts/rules.test.ts index f4942f1a154..0db63167633 100644 --- 
a/packages/loot-core/src/server/accounts/rules.test.ts +++ b/packages/loot-core/src/server/accounts/rules.test.ts @@ -8,7 +8,7 @@ import { RuleIndexer, } from './rules'; -let fieldTypes = new Map( +const fieldTypes = new Map( Object.entries({ id: 'id', date: 'date', @@ -49,7 +49,7 @@ describe('Condition', () => { expect(cond.eval({ name: null })).toBe(false); ['gt', 'gte', 'lt', 'lte', 'isapprox'].forEach(op => { - let cond = new Condition(op, 'date', '2020-01-01', null, fieldTypes); + const cond = new Condition(op, 'date', '2020-01-01', null, fieldTypes); expect(cond.eval({ date: null })).toBe(false); }); @@ -308,8 +308,8 @@ describe('Condition', () => { describe('Action', () => { test('`set` operator sets a field', () => { - let action = new Action('set', 'name', 'James', null, fieldTypes); - let item = { name: 'Sarah' }; + const action = new Action('set', 'name', 'James', null, fieldTypes); + const item = { name: 'Sarah' }; action.exec(item); expect(item.name).toBe('James'); @@ -361,7 +361,7 @@ describe('Rule', () => { }); test('rule with `and` conditionsOp evaluates conditions as AND', () => { - let rule = new Rule({ + const rule = new Rule({ conditionsOp: 'and', conditions: [ { op: 'is', field: 'name', value: 'James' }, @@ -390,7 +390,7 @@ describe('Rule', () => { }); test('rule with `or` conditionsOp evaluates conditions as OR', () => { - let rule = new Rule({ + const rule = new Rule({ conditionsOp: 'or', conditions: [ { op: 'is', field: 'name', value: 'James' }, @@ -423,7 +423,7 @@ describe('Rule', () => { }); test('rules are deterministically ranked', () => { - let rule = (id, conditions) => + const rule = (id, conditions) => new Rule({ id, conditionsOp: 'and', @@ -431,7 +431,7 @@ describe('Rule', () => { actions: [], fieldTypes, }); - let expectOrder = (rules, ids) => + const expectOrder = (rules, ids) => expect(rules.map(r => r.getId())).toEqual(ids); let rules = [ @@ -460,10 +460,10 @@ describe('Rule', () => { }); test('iterateIds finds all the ids', () => { - let rule = (id, conditions, actions = []) => + const rule = (id, conditions, actions = []) => new Rule({ id, conditionsOp: 'and', conditions, actions, fieldTypes }); - let rules = [ + const rules = [ rule( 'first', [{ op: 'is', field: 'description', value: 'id1' }], @@ -488,7 +488,7 @@ describe('Rule', () => { ]), ]; - let foundRules = []; + const foundRules = []; iterateIds(rules, 'description', (rule, value) => { foundRules.push(rule.getId()); }); @@ -498,9 +498,9 @@ describe('Rule', () => { describe('RuleIndexer', () => { test('indexing a single field works', () => { - let indexer = new RuleIndexer({ field: 'name' }); + const indexer = new RuleIndexer({ field: 'name' }); - let rule = new Rule({ + const rule = new Rule({ conditionsOp: 'and', conditions: [{ op: 'is', field: 'name', value: 'James' }], actions: [{ op: 'set', field: 'name', value: 'Sarah' }], @@ -508,7 +508,7 @@ describe('RuleIndexer', () => { }); indexer.index(rule); - let rule2 = new Rule({ + const rule2 = new Rule({ conditionsOp: 'and', conditions: [{ op: 'is', field: 'category', value: 'foo' }], actions: [{ op: 'set', field: 'name', value: 'Sarah' }], @@ -531,8 +531,8 @@ describe('RuleIndexer', () => { test('indexing using the firstchar method works', () => { // A condition that references both of the fields - let indexer = new RuleIndexer({ field: 'category', method: 'firstchar' }); - let rule = new Rule({ + const indexer = new RuleIndexer({ field: 'category', method: 'firstchar' }); + const rule = new Rule({ conditionsOp: 'and', conditions: [ { op: 
'is', field: 'name', value: 'James' }, @@ -543,7 +543,7 @@ describe('RuleIndexer', () => { }); indexer.index(rule); - let rule2 = new Rule({ + const rule2 = new Rule({ conditionsOp: 'and', conditions: [{ op: 'is', field: 'category', value: 'bars' }], actions: [{ op: 'set', field: 'name', value: 'Sarah' }], @@ -551,7 +551,7 @@ describe('RuleIndexer', () => { }); indexer.index(rule2); - let rule3 = new Rule({ + const rule3 = new Rule({ conditionsOp: 'and', conditions: [{ op: 'is', field: 'date', value: '2020-01-20' }], actions: [{ op: 'set', field: 'name', value: 'Sarah' }], @@ -582,7 +582,7 @@ describe('RuleIndexer', () => { }); test('re-indexing a field works', () => { - let indexer = new RuleIndexer({ field: 'category', method: 'firstchar' }); + const indexer = new RuleIndexer({ field: 'category', method: 'firstchar' }); let rule = new Rule({ id: 'id1', @@ -619,9 +619,9 @@ describe('RuleIndexer', () => { }); test('indexing works with the oneOf operator', () => { - let indexer = new RuleIndexer({ field: 'name', method: 'firstchar' }); + const indexer = new RuleIndexer({ field: 'name', method: 'firstchar' }); - let rule = new Rule({ + const rule = new Rule({ conditionsOp: 'and', conditions: [ { op: 'oneOf', field: 'name', value: ['James', 'Sarah', 'Evy'] }, @@ -631,7 +631,7 @@ describe('RuleIndexer', () => { }); indexer.index(rule); - let rule2 = new Rule({ + const rule2 = new Rule({ conditionsOp: 'and', conditions: [{ op: 'is', field: 'name', value: 'Georgia' }], actions: [{ op: 'set', field: 'category', value: 'Food' }], diff --git a/packages/loot-core/src/server/accounts/rules.ts b/packages/loot-core/src/server/accounts/rules.ts index 846682f166a..0869e6f618c 100644 --- a/packages/loot-core/src/server/accounts/rules.ts +++ b/packages/loot-core/src/server/accounts/rules.ts @@ -24,7 +24,7 @@ function assert(test, type, msg) { function parseRecurDate(desc) { try { - let rules = recurConfigToRSchedule(desc); + const rules = recurConfigToRSchedule(desc); return { type: 'recur', @@ -71,19 +71,19 @@ export function parseDateString(str) { } function parseBetweenAmount(between) { - let { num1, num2 } = between; + const { num1, num2 } = between; if (typeof num1 !== 'number' || typeof num2 !== 'number') { return null; } return { type: 'between', num1, num2 }; } -let CONDITION_TYPES = { +const CONDITION_TYPES = { date: { ops: ['is', 'isapprox', 'gt', 'gte', 'lt', 'lte'], nullable: false, parse(op, value, fieldName) { - let parsed = + const parsed = typeof value === 'string' ? parseDateString(value) : value.frequency != null @@ -160,7 +160,7 @@ let CONDITION_TYPES = { ops: ['is', 'isapprox', 'isbetween', 'gt', 'gte', 'lt', 'lte'], nullable: false, parse(op, value, fieldName) { - let parsed = + const parsed = typeof value === 'number' ? 
{ type: 'literal', value } : parseBetweenAmount(value); @@ -215,10 +215,10 @@ export class Condition { value; constructor(op, field, value, options, fieldTypes) { - let typeName = fieldTypes.get(field); + const typeName = fieldTypes.get(field); assert(typeName, 'internal', 'Invalid condition field: ' + field); - let type = CONDITION_TYPES[typeName]; + const type = CONDITION_TYPES[typeName]; // It's important to validate rules because a faulty rule might mess // up the user's transaction (and be very confusing) @@ -261,7 +261,7 @@ export class Condition { fieldValue = fieldValue.toLowerCase(); } - let type = this.type; + const type = this.type; if (type === 'number' && this.options) { if (this.options.outflow) { @@ -277,7 +277,7 @@ export class Condition { } } - let extractValue = v => (type === 'number' ? v.value : v); + const extractValue = v => (type === 'number' ? v.value : v); switch (this.op) { case 'isapprox': @@ -288,9 +288,9 @@ export class Condition { } if (this.value.type === 'recur') { - let { schedule } = this.value; + const { schedule } = this.value; if (this.op === 'isapprox') { - let fieldDate = parseDate(fieldValue); + const fieldDate = parseDate(fieldValue); return schedule.occursBetween( dateFns.subDays(fieldDate, 2), dateFns.addDays(fieldDate, 2), @@ -299,12 +299,12 @@ export class Condition { return schedule.occursOn({ date: parseDate(fieldValue) }); } } else { - let { date } = this.value; + const { date } = this.value; if (this.op === 'isapprox') { - let fullDate = parseDate(date); - let high = addDays(fullDate, 2); - let low = subDays(fullDate, 2); + const fullDate = parseDate(date); + const high = addDays(fullDate, 2); + const low = subDays(fullDate, 2); return fieldValue >= low && fieldValue <= high; } else { @@ -320,9 +320,9 @@ export class Condition { } } } else if (type === 'number') { - let number = this.value.value; + const number = this.value.value; if (this.op === 'isapprox') { - let threshold = getApproxNumberThreshold(number); + const threshold = getApproxNumberThreshold(number); return ( fieldValue >= number - threshold && fieldValue <= number + threshold @@ -337,7 +337,7 @@ export class Condition { case 'isbetween': { // The parsing logic already checks that the value is of the // right type (only numbers with high and low) - let [low, high] = sortNumbers(this.value.num1, this.value.num2); + const [low, high] = sortNumbers(this.value.num1, this.value.num2); return fieldValue >= low && fieldValue <= high; } case 'contains': @@ -419,7 +419,7 @@ export class Condition { type ActionOperator = 'set' | 'link-schedule'; -let ACTION_OPS: ActionOperator[] = ['set', 'link-schedule']; +const ACTION_OPS: ActionOperator[] = ['set', 'link-schedule']; export class Action { field; @@ -437,7 +437,7 @@ export class Action { ); if (op === 'set') { - let typeName = fieldTypes.get(field); + const typeName = fieldTypes.get(field); assert(typeName, 'internal', `Invalid field for action: ${field}`); this.field = field; this.type = typeName; @@ -520,7 +520,7 @@ export class Rule { } execActions(object) { - let changes = {}; + const changes = {}; this.actions.forEach(action => action.exec(changes)); return changes; } @@ -534,7 +534,7 @@ export class Rule { // Apply is similar to exec but applies the changes for you apply(object) { - let changes = this.exec(object); + const changes = this.exec(object); return Object.assign({}, object, changes); } @@ -586,8 +586,8 @@ export class RuleIndexer { } getIndexes(rule) { - let cond = rule.conditions.find(cond => cond.field === this.field); 
- let indexes = []; + const cond = rule.conditions.find(cond => cond.field === this.field); + const indexes = []; if ( cond && @@ -609,14 +609,14 @@ export class RuleIndexer { } index(rule) { - let indexes = this.getIndexes(rule); + const indexes = this.getIndexes(rule); indexes.forEach(index => { index.add(rule); }); } remove(rule) { - let indexes = this.getIndexes(rule); + const indexes = this.getIndexes(rule); indexes.forEach(index => { index.delete(rule); }); @@ -625,7 +625,7 @@ export class RuleIndexer { getApplicableRules(object) { let indexedRules; if (this.field in object) { - let key = this.getKey(object[this.field]); + const key = this.getKey(object[this.field]); if (key) { indexedRules = this.rules.get(key); } @@ -654,7 +654,7 @@ const OP_SCORES: Record = { }; function computeScore(rule) { - let initialScore = rule.conditions.reduce((score, condition) => { + const initialScore = rule.conditions.reduce((score, condition) => { if (OP_SCORES[condition.op] == null) { console.log(`Found invalid operation while ranking: ${condition.op}`); return 0; @@ -679,7 +679,7 @@ function computeScore(rule) { } function _rankRules(rules) { - let scores = new Map(); + const scores = new Map(); rules.forEach(rule => { scores.set(rule, computeScore(rule)); }); @@ -688,15 +688,15 @@ function _rankRules(rules) { // order. That's why rules have ids: if two rules have the same score, it // sorts by id return [...rules].sort((r1, r2) => { - let score1 = scores.get(r1); - let score2 = scores.get(r2); + const score1 = scores.get(r1); + const score2 = scores.get(r2); if (score1 < score2) { return -1; } else if (score1 > score2) { return 1; } else { - let id1 = r1.getId(); - let id2 = r2.getId(); + const id1 = r1.getId(); + const id2 = r2.getId(); return id1 < id2 ? -1 : id1 > id2 ? 1 : 0; } }); @@ -707,7 +707,7 @@ export function rankRules(rules) { let normal = []; let post = []; - for (let rule of rules) { + for (const rule of rules) { switch (rule.stage) { case 'pre': pre.push(rule); @@ -744,7 +744,7 @@ export function migrateIds(rule, mappings) { // first id back to make [1, 2]. Keeping the original value around // solves this. 
for (let ci = 0; ci < rule.conditions.length; ci++) { - let cond = rule.conditions[ci]; + const cond = rule.conditions[ci]; if (cond.type === 'id') { switch (cond.op) { case 'is': @@ -769,7 +769,7 @@ export function migrateIds(rule, mappings) { } for (let ai = 0; ai < rule.actions.length; ai++) { - let action = rule.actions[ai]; + const action = rule.actions[ai]; if (action.type === 'id') { if (action.op === 'set') { action.value = mappings.get(action.rawValue) || action.rawValue; @@ -783,9 +783,9 @@ export function iterateIds(rules, fieldName, func) { let i; ruleiter: for (i = 0; i < rules.length; i++) { - let rule = rules[i]; + const rule = rules[i]; for (let ci = 0; ci < rule.conditions.length; ci++) { - let cond = rule.conditions[ci]; + const cond = rule.conditions[ci]; if (cond.type === 'id' && cond.field === fieldName) { switch (cond.op) { case 'is': @@ -818,7 +818,7 @@ export function iterateIds(rules, fieldName, func) { } for (let ai = 0; ai < rule.actions.length; ai++) { - let action = rule.actions[ai]; + const action = rule.actions[ai]; if (action.type === 'id' && action.field === fieldName) { // Currently `set` is the only op, but if we add more this // will need to be extended diff --git a/packages/loot-core/src/server/accounts/sync.test.ts b/packages/loot-core/src/server/accounts/sync.test.ts index aedd21a9109..c3a5f66c7a5 100644 --- a/packages/loot-core/src/server/accounts/sync.test.ts +++ b/packages/loot-core/src/server/accounts/sync.test.ts @@ -112,7 +112,7 @@ async function getAllPayees() { describe('Account sync', () => { test('reconcile creates payees correctly', async () => { - let { id } = await prepareDatabase(); + const { id } = await prepareDatabase(); let payees = await getAllPayees(); expect(payees.length).toBe(0); @@ -125,7 +125,7 @@ describe('Account sync', () => { payees = await getAllPayees(); expect(payees.length).toBe(2); - let transactions = await getAllTransactions(); + const transactions = await getAllTransactions(); expect(transactions.length).toBe(2); expect(transactions.find(t => t.amount === 4133).payee).toBe( payees.find(p => p.name === 'Bakkerij').id, @@ -136,16 +136,16 @@ describe('Account sync', () => { }); test('reconcile matches single transaction', async () => { - let mockTransactions = prepMockTransactions(); + const mockTransactions = prepMockTransactions(); const { id, account_id } = await prepareDatabase(); await syncAccount('userId', 'userKey', id, account_id, 'bank'); // The payee can be anything, all that matters is the amount is the same - let mockTransaction = mockTransactions.find(t => t.date === '2017-10-17'); + const mockTransaction = mockTransactions.find(t => t.date === '2017-10-17'); mockTransaction.amount = 29.47; - let payeeId = await db.insertPayee({ name: 'macy' }); + const payeeId = await db.insertPayee({ name: 'macy' }); await db.insertTransaction({ id: 'one', account: id, @@ -154,7 +154,7 @@ describe('Account sync', () => { payee: payeeId, }); - let { added, updated } = await reconcileTransactions( + const { added, updated } = await reconcileTransactions( id, mockTransactions.filter(t => t.date >= '2017-10-15').map(fromPlaid), ); @@ -162,18 +162,18 @@ describe('Account sync', () => { expect(added.length).toBe(3); expect(updated.length).toBe(1); - let transactions = await getAllTransactions(); - let transaction = transactions.find(t => t.amount === -2947); + const transactions = await getAllTransactions(); + const transaction = transactions.find(t => t.amount === -2947); expect(transaction.id).toBe(updated[0]); // The payee 
has not been updated - it's still the payee that the original transaction had - let payees = await getAllPayees(); + const payees = await getAllPayees(); expect(payees.length).toBe(18); expect(transaction.payee).toBe(payeeId); }); test('reconcile matches multiple transactions', async () => { - let mockTransactions = prepMockTransactions(); + const mockTransactions = prepMockTransactions(); const { id, account_id } = await prepareDatabase(); await syncAccount('userId', 'userKey', id, account_id, 'bank'); @@ -183,7 +183,7 @@ describe('Account sync', () => { // name. This should happen even though other transactions with // the same amount are imported first, i.e. high fidelity matches // always win - let mocked = mockTransactions.filter(t => t.date === '2017-10-17'); + const mocked = mockTransactions.filter(t => t.date === '2017-10-17'); mocked[0].name = papaJohns; mocked[0].amount = 29.47; mocked[1].name = 'Lowe’s Store'; @@ -216,12 +216,12 @@ describe('Account sync', () => { payee: await db.insertPayee({ name: 'macy' }), }); - let { added, updated } = await reconcileTransactions( + const { added, updated } = await reconcileTransactions( id, mockTransactions.filter(t => t.date >= '2017-10-15').map(fromPlaid), ); - let transactions = await getAllTransactions(); + const transactions = await getAllTransactions(); expect(updated.length).toBe(3); expect(added.length).toBe(1); @@ -237,12 +237,12 @@ describe('Account sync', () => { }); test('reconcile matches multiple transactions (imported_id wins)', async () => { - let mockTransactions = prepMockTransactions(); + const mockTransactions = prepMockTransactions(); const { id, account_id } = await prepareDatabase(); await syncAccount('userId', 'userKey', id, account_id, 'bank'); - let mocked = mockTransactions.filter(t => t.date === '2017-10-17'); + const mocked = mockTransactions.filter(t => t.date === '2017-10-17'); mocked[0].name = papaJohns; mocked[0].amount = 29.47; mocked[1].name = lowes; @@ -267,12 +267,12 @@ describe('Account sync', () => { payee: await db.insertPayee({ name: 'lowes' }), }); - let { added, updated } = await reconcileTransactions( + const { added, updated } = await reconcileTransactions( id, mockTransactions.filter(t => t.date >= '2017-10-15').map(fromPlaid), ); - let transactions = await getAllTransactions(); + const transactions = await getAllTransactions(); expect(updated).toEqual(['two', 'one']); expect(added.length).toBe(2); @@ -286,7 +286,7 @@ describe('Account sync', () => { const { id, account_id } = await prepareDatabase(); await syncAccount('userId', 'userKey', id, account_id, 'bank'); - let differ = expectSnapshotWithDiffer(await getAllTransactions()); + const differ = expectSnapshotWithDiffer(await getAllTransactions()); mockTransactions = mockTransactions.filter(t => t.date === '2017-10-17'); mockTransactions[0].name = 'foo'; @@ -323,7 +323,7 @@ describe('Account sync', () => { }); test('import updates transfers when matched', async () => { - let mockTransactions = prepMockTransactions(); + const mockTransactions = prepMockTransactions(); const { id, account_id } = await prepareDatabase(); await db.insertAccount({ id: 'two', name: 'two' }); await db.insertPayee({ @@ -333,7 +333,7 @@ describe('Account sync', () => { }); await syncAccount('userId', 'userKey', id, account_id, 'bank'); - let differ = expectSnapshotWithDiffer(await getAllTransactions()); + const differ = expectSnapshotWithDiffer(await getAllTransactions()); const mockTransaction = mockTransactions.find(t => t.date === '2017-10-17'); 
mockTransaction.name = '#001 fenn st Macy’s 33333 EMX'; @@ -373,12 +373,12 @@ describe('Account sync', () => { { date: '2020-01-01', amount: 2948 }, ]); - let transactions = await getAllTransactions(); + const transactions = await getAllTransactions(); expect(transactions.length).toBe(2); expect(transactions).toMatchSnapshot(); // No payees should be created - let payees = await getAllPayees(); + const payees = await getAllPayees(); expect(payees.length).toBe(0); // Make _at least_ the date is required @@ -393,12 +393,12 @@ describe('Account sync', () => { id: 'group2', name: 'group2', }); - let catId = await db.insertCategory({ + const catId = await db.insertCategory({ name: 'Food', cat_group: 'group2', }); - let payeeId = await db.insertPayee({ name: 'bakkerij' }); + const payeeId = await db.insertPayee({ name: 'bakkerij' }); await insertRule({ stage: null, @@ -411,7 +411,7 @@ describe('Account sync', () => { { date: '2020-01-02', payee_name: 'Bakkerij', amount: 4133 }, ]); - let transactions = await getAllTransactions(); + const transactions = await getAllTransactions(); // Even though the payee was inferred from the string name (no // renaming rules ran), it should match the above rule and set the // category @@ -420,7 +420,7 @@ describe('Account sync', () => { expect(transactions[0].category).toBe(catId); // It also should not have created a payee - let payees = await getAllPayees(); + const payees = await getAllPayees(); expect(payees.length).toBe(1); expect(payees[0].id).toBe(payeeId); }); @@ -432,7 +432,7 @@ describe('Account sync', () => { { date: '2020-01-02', payee_name: ' ', amount: 4133 }, ]); - let transactions = await getAllTransactions(); + const transactions = await getAllTransactions(); // Even though the payee was inferred from the string name (no // renaming rules ran), it should match the above rule and set the // category @@ -442,14 +442,14 @@ describe('Account sync', () => { expect(transactions[0].date).toBe(20200102); // It also should not have created a payee - let payees = await getAllPayees(); + const payees = await getAllPayees(); expect(payees.length).toBe(0); }); test('reconcile run rules dont create unnecessary payees', async () => { const { id: acctId } = await prepareDatabase(); - let payeeId = await db.insertPayee({ name: 'bakkerij-renamed' }); + const payeeId = await db.insertPayee({ name: 'bakkerij-renamed' }); await insertRule({ stage: null, @@ -462,16 +462,16 @@ describe('Account sync', () => { { date: '2020-01-02', payee_name: 'bakkerij', amount: 4133 }, ]); - let payees = await getAllPayees(); + const payees = await getAllPayees(); expect(payees.length).toBe(1); expect(payees[0].id).toBe(payeeId); - let transactions = await getAllTransactions(); + const transactions = await getAllTransactions(); expect(transactions.length).toBe(1); expect(transactions[0].payee).toBe(payeeId); }); - let testMapped = version => { + const testMapped = version => { test(`reconcile matches unmapped and mapped payees (${version})`, async () => { const { id: acctId } = await prepareDatabase(); @@ -487,8 +487,8 @@ describe('Account sync', () => { // to } - let payeeId1 = await db.insertPayee({ name: 'bakkerij2' }); - let payeeId2 = await db.insertPayee({ name: 'bakkerij-renamed' }); + const payeeId1 = await db.insertPayee({ name: 'bakkerij2' }); + const payeeId2 = await db.insertPayee({ name: 'bakkerij-renamed' }); // Insert a rule *before* payees are merged. 
Not that v2 would // fail if we inserted this rule after, because the rule would @@ -528,7 +528,7 @@ describe('Account sync', () => { payee: null, }); - let { updated } = await reconcileTransactions(acctId, [ + const { updated } = await reconcileTransactions(acctId, [ { date: '2017-10-17', payee_name: 'bakkerij', @@ -537,14 +537,14 @@ describe('Account sync', () => { }, ]); - let payees = await getAllPayees(); + const payees = await getAllPayees(); expect(payees.length).toBe(1); expect(payees[0].id).toBe(version === 'v1' ? payeeId2 : payeeId1); expect(updated.length).toBe(1); expect(updated[0]).toBe('one'); - let transactions = await getAllTransactions(); + const transactions = await getAllTransactions(); expect(transactions.length).toBe(2); expect(transactions.find(t => t.id === 'one').imported_id).toBe( 'imported1', @@ -558,7 +558,7 @@ describe('Account sync', () => { test('addTransactions simply adds transactions', async () => { const { id: acctId } = await prepareDatabase(); - let payeeId = await db.insertPayee({ name: 'bakkerij-renamed' }); + const payeeId = await db.insertPayee({ name: 'bakkerij-renamed' }); // Make sure it still runs rules await insertRule({ @@ -568,7 +568,7 @@ describe('Account sync', () => { actions: [{ op: 'set', field: 'payee', value: payeeId }], }); - let transactions = [ + const transactions = [ { date: '2017-10-17', payee_name: 'BAKKerij', @@ -591,15 +591,15 @@ describe('Account sync', () => { }, ]; - let added = await addTransactions(acctId, transactions); + const added = await addTransactions(acctId, transactions); expect(added.length).toBe(transactions.length); - let payees = await getAllPayees(); + const payees = await getAllPayees(); expect(payees.length).toBe(3); - let getName = id => payees.find(p => p.id === id).name; + const getName = id => payees.find(p => p.id === id).name; - let allTransactions = await getAllTransactions(); + const allTransactions = await getAllTransactions(); expect(allTransactions.length).toBe(4); expect(allTransactions.map(t => getName(t.payee))).toEqual([ 'bakkerij3', diff --git a/packages/loot-core/src/server/accounts/sync.ts b/packages/loot-core/src/server/accounts/sync.ts index 112e9182a30..3b3e7332e56 100644 --- a/packages/loot-core/src/server/accounts/sync.ts +++ b/packages/loot-core/src/server/accounts/sync.ts @@ -28,7 +28,7 @@ function BankSyncError(type: string, code: string) { function makeSplitTransaction(trans, subtransactions) { // We need to calculate the final state of split transactions - let { subtransactions: sub, ...parent } = recalculateSplit({ + const { subtransactions: sub, ...parent } = recalculateSplit({ ...trans, is_parent: true, subtransactions: subtransactions.map((transaction, idx) => @@ -60,13 +60,13 @@ async function updateAccountBalance(id, balance) { } export async function getAccounts(userId, userKey, id) { - let res = await post(getServer().PLAID_SERVER + '/accounts', { + const res = await post(getServer().PLAID_SERVER + '/accounts', { userId, key: userKey, item_id: id, }); - let { accounts } = res; + const { accounts } = res; accounts.forEach(acct => { acct.balances.current = getAccountBalance(acct); @@ -79,7 +79,7 @@ export async function getGoCardlessAccounts(userId, userKey, id) { const userToken = await asyncStorage.getItem('user-token'); if (!userToken) return; - let res = await post( + const res = await post( getServer().GOCARDLESS_SERVER + '/accounts', { userId, @@ -91,7 +91,7 @@ export async function getGoCardlessAccounts(userId, userKey, id) { }, ); - let { accounts } = res; + const { 
accounts } = res; accounts.forEach(acct => { acct.balances.current = getAccountBalance(acct); @@ -120,7 +120,7 @@ async function downloadTransactions( ) { let allTransactions = []; let accountBalance = null; - let pageSize = 100; + const pageSize = 100; let offset = 0; let numDownloaded = 0; @@ -181,7 +181,7 @@ async function downloadGoCardlessTransactions( bankId, since, ) { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); if (!userToken) return; const res = await post( @@ -226,7 +226,7 @@ async function resolvePayee(trans, payeeName, payeesToCreate) { return payee.id; } else { // Otherwise we're going to create a new one - let newPayee = { id: uuidv4(), name: payeeName }; + const newPayee = { id: uuidv4(), name: payeeName }; payeesToCreate.set(payeeName.toLowerCase(), newPayee); return newPayee.id; } @@ -240,9 +240,9 @@ async function normalizeTransactions( acctId, { rawPayeeName = false } = {}, ) { - let payeesToCreate = new Map(); + const payeesToCreate = new Map(); - let normalized = []; + const normalized = []; for (let trans of transactions) { // Validate the date because we do some stuff with it. The db // layer does better validation, but this will give nicer errors @@ -251,11 +251,12 @@ } // Strip off the irregular properties - let { payee_name, subtransactions, ...rest } = trans; + const { payee_name: originalPayeeName, subtransactions, ...rest } = trans; trans = rest; + let payee_name = originalPayeeName; if (payee_name) { - let trimmed = payee_name.trim(); + const trimmed = payee_name.trim(); if (trimmed === '') { payee_name = null; } else { @@ -287,10 +288,10 @@ } async function normalizeGoCardlessTransactions(transactions, acctId) { - let payeesToCreate = new Map(); + const payeesToCreate = new Map(); - let normalized = []; - for (let trans of transactions) { + const normalized = []; + for (const trans of transactions) { if (!trans.amount) { trans.amount = trans.transactionAmount.amount; } @@ -379,10 +380,10 @@ async function normalizeGoCardlessTransactions(transactions, acctId) { } async function createNewPayees(payeesToCreate, addsAndUpdates) { - let usedPayeeIds = new Set(addsAndUpdates.map(t => t.payee)); + const usedPayeeIds = new Set(addsAndUpdates.map(t => t.payee)); await batchMessages(async () => { - for (let payee of payeesToCreate.values()) { + for (const payee of payeesToCreate.values()) { // Only create the payee if it ended up being used if (usedPayeeIds.has(payee.id)) { await db.insertPayee(payee); @@ -396,16 +397,16 @@ export async function reconcileGoCardlessTransactions(acctId, transactions) { const updated = []; const added = []; - let { normalized, payeesToCreate } = await normalizeGoCardlessTransactions( + const { normalized, payeesToCreate } = await normalizeGoCardlessTransactions( transactions, acctId, ); // The first pass runs the rules, and preps data for fuzzy matching - let transactionsStep1 = []; - for (let { payee_name, trans, subtransactions } of normalized) { + const transactionsStep1 = []; + for (const { payee_name, trans, subtransactions } of normalized) { // Run the rules - trans = runRules(trans); + const updatedTrans = runRules(trans); let match = null; let fuzzyDataset = null; @@ -413,10 +414,10 @@ export async function reconcileGoCardlessTransactions(acctId, transactions) { // First, match with an existing transaction's imported_id. This // is the highest fidelity match and should always be attempted // first. 
- if (trans.imported_id) { + if (updatedTrans.imported_id) { match = await db.first( 'SELECT * FROM v_transactions WHERE imported_id = ? AND account = ?', - [trans.imported_id, acctId], + [updatedTrans.imported_id, acctId], ); if (match) { @@ -434,9 +435,9 @@ export async function reconcileGoCardlessTransactions(acctId, transactions) { `SELECT id, is_parent, date, imported_id, payee, category, notes FROM v_transactions WHERE date >= ? AND date <= ? AND amount = ? AND account = ? AND is_child = 0`, [ - db.toDateRepr(monthUtils.subDays(trans.date, 4)), - db.toDateRepr(monthUtils.addDays(trans.date, 1)), - trans.amount || 0, + db.toDateRepr(monthUtils.subDays(updatedTrans.date, 4)), + db.toDateRepr(monthUtils.addDays(updatedTrans.date, 1)), + updatedTrans.amount || 0, acctId, ], ); @@ -444,7 +445,7 @@ export async function reconcileGoCardlessTransactions(acctId, transactions) { transactionsStep1.push({ payee_name, - trans, + trans: updatedTrans, subtransactions, match, fuzzyDataset, }); @@ -456,10 +457,10 @@ export async function reconcileGoCardlessTransactions(acctId, transactions) { // matching always happens first, i.e. a transaction should match // match with low fidelity if a later transaction is going to match // the same one with high fidelity. - let transactionsStep2 = transactionsStep1.map(data => { + const transactionsStep2 = transactionsStep1.map(data => { if (!data.match && data.fuzzyDataset) { // Try to find one where the payees match. - let match = data.fuzzyDataset.find( + const match = data.fuzzyDataset.find( row => !hasMatched.has(row.id) && data.trans.payee === row.payee, ); @@ -475,9 +476,9 @@ export async function reconcileGoCardlessTransactions(acctId, transactions) { // matching: it just find the first transaction that hasn't been // matched yet. Remember the the dataset only contains transactions // around the same date with the same amount. 
- let transactionsStep3 = transactionsStep2.map(data => { + const transactionsStep3 = transactionsStep2.map(data => { if (!data.match && data.fuzzyDataset) { - let match = data.fuzzyDataset.find(row => !hasMatched.has(row.id)); + const match = data.fuzzyDataset.find(row => !hasMatched.has(row.id)); if (match) { hasMatched.add(match.id); return { ...data, match }; } @@ -487,10 +488,10 @@ export async function reconcileGoCardlessTransactions(acctId, transactions) { // Finally, generate & commit the changes - for (let { trans, subtransactions, match } of transactionsStep3) { + for (const { trans, subtransactions, match } of transactionsStep3) { if (match) { // TODO: change the above sql query to use aql - let existing = { + const existing = { ...match, cleared: match.cleared === 1, date: db.fromDateRepr(match.date), @@ -521,7 +522,7 @@ export async function reconcileGoCardlessTransactions(acctId, transactions) { } } else { // Insert a new transaction - let finalTransaction = { + const finalTransaction = { ...trans, id: uuidv4(), category: trans.category || null, @@ -550,16 +551,16 @@ export async function reconcileTransactions(acctId, transactions) { const updated = []; const added = []; - let { normalized, payeesToCreate } = await normalizeTransactions( + const { normalized, payeesToCreate } = await normalizeTransactions( transactions, acctId, ); // The first pass runs the rules, and preps data for fuzzy matching - let transactionsStep1 = []; - for (let { payee_name, trans, subtransactions } of normalized) { + const transactionsStep1 = []; + for (const { payee_name, trans, subtransactions } of normalized) { // Run the rules - trans = runRules(trans); + const updatedTrans = runRules(trans); let match = null; let fuzzyDataset = null; @@ -567,10 +568,10 @@ export async function reconcileTransactions(acctId, transactions) { // First, match with an existing transaction's imported_id. This // is the highest fidelity match and should always be attempted // first. - if (trans.imported_id) { + if (updatedTrans.imported_id) { match = await db.first( 'SELECT * FROM v_transactions WHERE imported_id = ? AND account = ?', - [trans.imported_id, acctId], + [updatedTrans.imported_id, acctId], ); if (match) { @@ -588,9 +589,9 @@ export async function reconcileTransactions(acctId, transactions) { `SELECT id, is_parent, date, imported_id, payee, category, notes FROM v_transactions WHERE date >= ? AND date <= ? AND amount = ? AND account = ? AND is_child = 0`, [ - db.toDateRepr(monthUtils.subDays(trans.date, 4)), - db.toDateRepr(monthUtils.addDays(trans.date, 1)), - trans.amount || 0, + db.toDateRepr(monthUtils.subDays(updatedTrans.date, 4)), + db.toDateRepr(monthUtils.addDays(updatedTrans.date, 1)), + updatedTrans.amount || 0, acctId, ], ); @@ -598,7 +599,7 @@ export async function reconcileTransactions(acctId, transactions) { transactionsStep1.push({ payee_name, - trans, + trans: updatedTrans, subtransactions, match, fuzzyDataset, }); @@ -610,10 +611,10 @@ export async function reconcileTransactions(acctId, transactions) { // matching always happens first, i.e. a transaction should match // match with low fidelity if a later transaction is going to match // the same one with high fidelity. - let transactionsStep2 = transactionsStep1.map(data => { + const transactionsStep2 = transactionsStep1.map(data => { if (!data.match && data.fuzzyDataset) { // Try to find one where the payees match. 
- let match = data.fuzzyDataset.find( + const match = data.fuzzyDataset.find( row => !hasMatched.has(row.id) && data.trans.payee === row.payee, ); @@ -629,9 +630,9 @@ export async function reconcileTransactions(acctId, transactions) { // matching: it just find the first transaction that hasn't been // matched yet. Remember the the dataset only contains transactions // around the same date with the same amount. - let transactionsStep3 = transactionsStep2.map(data => { + const transactionsStep3 = transactionsStep2.map(data => { if (!data.match && data.fuzzyDataset) { - let match = data.fuzzyDataset.find(row => !hasMatched.has(row.id)); + const match = data.fuzzyDataset.find(row => !hasMatched.has(row.id)); if (match) { hasMatched.add(match.id); return { ...data, match }; @@ -641,10 +642,10 @@ export async function reconcileTransactions(acctId, transactions) { }); // Finally, generate & commit the changes - for (let { trans, subtransactions, match } of transactionsStep3) { + for (const { trans, subtransactions, match } of transactionsStep3) { if (match) { // TODO: change the above sql query to use aql - let existing = { + const existing = { ...match, cleared: match.cleared === 1, date: db.fromDateRepr(match.date), @@ -676,7 +677,7 @@ export async function reconcileTransactions(acctId, transactions) { } } else { // Insert a new transaction - let finalTransaction = { + const finalTransaction = { ...trans, id: uuidv4(), category: trans.category || null, @@ -709,21 +710,21 @@ export async function addTransactions( ) { const added = []; - let { normalized, payeesToCreate } = await normalizeTransactions( + const { normalized, payeesToCreate } = await normalizeTransactions( transactions, acctId, { rawPayeeName: true }, ); - for (let { trans, subtransactions } of normalized) { + for (const { trans, subtransactions } of normalized) { // Run the rules - trans = runRules(trans); + const updatedTrans = runRules(trans); - let finalTransaction = { + const finalTransaction = { id: uuidv4(), - ...trans, + ...updatedTrans, account: acctId, - cleared: trans.cleared != null ? trans.cleared : true, + cleared: updatedTrans.cleared != null ? updatedTrans.cleared : true, }; // Add split transactions if they are given @@ -738,7 +739,7 @@ export async function addTransactions( let newTransactions; if (runTransfers || learnCategories) { - let res = await batchUpdateTransactions({ + const res = await batchUpdateTransactions({ added, learnCategories, runTransfers, @@ -790,7 +791,7 @@ export async function syncGoCardlessAccount( ]), ); - let { transactions, accountBalance } = await downloadGoCardlessTransactions( + const gocardlessResult = await downloadGoCardlessTransactions( userId, userKey, acctId, @@ -798,12 +799,14 @@ export async function syncGoCardlessAccount( startDate, ); + let transactions = gocardlessResult.transactions; if (transactions.length === 0) { return { added: [], updated: [] }; } transactions = transactions.map(trans => ({ ...trans, account: id })); + const accountBalance = gocardlessResult.accountBalance; return runMutator(async () => { const result = await reconcileGoCardlessTransactions(id, transactions); await updateAccountBalance(id, accountBalance); @@ -838,7 +841,7 @@ export async function syncGoCardlessAccount( const payee = await getStartingBalancePayee(); return runMutator(async () => { - let initialId = await db.insertTransaction({ + const initialId = await db.insertTransaction({ account: id, amount: startingBalance, category: acctRow.offbudget === 0 ? 
payee.category : null, @@ -848,7 +851,7 @@ export async function syncGoCardlessAccount( starting_balance_flag: true, }); - let result = await reconcileGoCardlessTransactions(id, transactions); + const result = await reconcileGoCardlessTransactions(id, transactions); return { ...result, added: [initialId, ...result.added], @@ -887,19 +890,22 @@ export async function syncAccount(userId, userKey, id, acctId, bankId) { date = startingDate; } - let { transactions, accountBalance } = await downloadTransactions( + const downloadResult = await downloadTransactions( userId, userKey, acctId, bankId, date, ); + + let transactions = downloadResult.transactions; if (transactions.length === 0) { return { added: [], updated: [] }; } transactions = transactions.map(trans => ({ ...trans, account: id })); + const accountBalance = downloadResult.accountBalance; return runMutator(async () => { const result = await reconcileTransactions(id, transactions); await updateAccountBalance(id, accountBalance); @@ -928,7 +934,7 @@ export async function syncAccount(userId, userKey, id, acctId, bankId) { // before the first imported transaction, we need to get the // current balance from the accounts table and subtract all the // imported transactions. - let currentBalance = acctRow.balance_current; + const currentBalance = acctRow.balance_current; const previousBalance = transactions.reduce((total, trans) => { return total - trans.amount; @@ -939,10 +945,10 @@ export async function syncAccount(userId, userKey, id, acctId, bankId) { ? transactions[transactions.length - 1].date : monthUtils.currentDay(); - let payee = await getStartingBalancePayee(); + const payee = await getStartingBalancePayee(); return runMutator(async () => { - let initialId = await db.insertTransaction({ + const initialId = await db.insertTransaction({ account: id, amount: previousBalance, category: acctRow.offbudget === 0 ? 
payee.category : null, @@ -952,7 +958,7 @@ export async function syncAccount(userId, userKey, id, acctId, bankId) { starting_balance_flag: true, }); - let result = await reconcileTransactions(id, transactions); + const result = await reconcileTransactions(id, transactions); return { ...result, added: [initialId, ...result.added], diff --git a/packages/loot-core/src/server/accounts/title/index.ts b/packages/loot-core/src/server/accounts/title/index.ts index 358793579b9..45ec9c58521 100644 --- a/packages/loot-core/src/server/accounts/title/index.ts +++ b/packages/loot-core/src/server/accounts/title/index.ts @@ -2,14 +2,14 @@ import lowerCase from './lower-case'; import specials from './specials'; -let character = +const character = '[0-9\u0041-\u005A\u0061-\u007A\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376-\u0377\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u048A-\u0523\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA\u05F0-\u05F2\u0621-\u064A\u066E-\u066F\u0671-\u06D3\u06D5\u06E5-\u06E6\u06EE-\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA\u07F4-\u07F5\u07FA\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0972\u097B-\u097F\u0985-\u098C\u098F-\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BD\u09CE\u09DC-\u09DD\u09DF-\u09E1\u09F0-\u09F1\u0A05-\u0A0A\u0A0F-\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32-\u0A33\u0A35-\u0A36\u0A38-\u0A39\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2-\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0-\u0AE1\u0B05-\u0B0C\u0B0F-\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32-\u0B33\u0B35-\u0B39\u0B3D\u0B5C-\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99-\u0B9A\u0B9C\u0B9E-\u0B9F\u0BA3-\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D\u0C58-\u0C59\u0C60-\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0-\u0CE1\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D28\u0D2A-\u0D39\u0D3D\u0D60-\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32-\u0E33\u0E40-\u0E46\u0E81-\u0E82\u0E84\u0E87-\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA-\u0EAB\u0EAD-\u0EB0\u0EB2-\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6\u0EDC-\u0EDD\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8B\u1000-\u102A\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065-\u1066\u106E-\u1070\u1075-\u1081\u108E\u10A0-\u10C5\u10D0-\u10FA\u10FC\u1100-\u1159\u115F-\u11A2\u11A8-\u11F9\u1200-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C\u166F-\u1676\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F0\u1700-\u170C\u170E-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u1900-\u191C\u1950-\u196D\u1970-\u1974\u1980-\u19A9\u19C1-\u19C7\u1A00-\u1A16\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE-\u1BAF\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F\u2090-\u2094\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145
-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2C6F\u2C71-\u2C7D\u2C80-\u2CE4\u2D00-\u2D25\u2D30-\u2D65\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303C\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31B7\u31F0-\u31FF\u3400\u4DB5\u4E00\u9FC3\uA000-\uA48C\uA500-\uA60C\uA610-\uA61F\uA62A-\uA62B\uA640-\uA65F\uA662-\uA66E\uA67F-\uA697\uA717-\uA71F\uA722-\uA788\uA78B-\uA78C\uA7FB-\uA801\uA803-\uA805\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA90A-\uA925\uA930-\uA946\uAA00-\uAA28\uAA40-\uAA42\uAA44-\uAA4B\uAC00\uD7A3\uF900-\uFA2D\uFA30-\uFA6A\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40-\uFB41\uFB43-\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]'; -let regex = new RegExp( +const regex = new RegExp( `(?:(?:(\\s?(?:^|[.\\(\\)!?;:"-])\\s*)(${character}))|(${character}))(${character}*[’']*${character}*)`, 'g', ); -let convertToRegExp = specials => +const convertToRegExp = specials => specials.map(s => [new RegExp(`\\b${s}\\b`, 'gi'), s]); function parseMatch(match) { diff --git a/packages/loot-core/src/server/accounts/transaction-rules.test.ts b/packages/loot-core/src/server/accounts/transaction-rules.test.ts index 9827e5cd2c6..bc09dc15f6c 100644 --- a/packages/loot-core/src/server/accounts/transaction-rules.test.ts +++ b/packages/loot-core/src/server/accounts/transaction-rules.test.ts @@ -27,8 +27,8 @@ beforeEach(async () => { }); async function getMatchingTransactions(conds) { - let { filters } = conditionsToAQL(conds); - let { data } = await runQuery( + const { filters } = conditionsToAQL(conds); + const { data } = await runQuery( q('transactions').filter({ $and: filters }).select('*'), ); return data; @@ -125,7 +125,7 @@ describe('Transaction rules', () => { spy.mockRestore(); // Finally make sure the rule is actually in place and runs - let transaction = runRules({ + const transaction = runRules({ date: '2019-05-10', notes: '', category: null, @@ -137,7 +137,7 @@ describe('Transaction rules', () => { test('update a rule in the database', async () => { await loadRules(); - let id = await insertRule({ + const id = await insertRule({ stage: 'pre', conditionsOp: 'and', conditions: [{ op: 'is', field: 'imported_payee', value: 'kroger' }], @@ -189,7 +189,7 @@ describe('Transaction rules', () => { test('delete a rule in the database', async () => { await loadRules(); - let id = await insertRule({ + const id = await insertRule({ stage: 'pre', conditionsOp: 'and', conditions: [{ op: 'is', field: 'payee', value: 'kroger' }], @@ -369,14 +369,17 @@ describe('Transaction rules', () => { test('transactions can be queried by rule', async () => { await loadRules(); - let account = await db.insertAccount({ name: 'bank' }); - let categoryGroupId = await db.insertCategoryGroup({ name: 'general' }); - let categoryId = await db.insertCategory({ + const account = await db.insertAccount({ name: 'bank' }); + const categoryGroupId = await db.insertCategoryGroup({ name: 'general' }); + const categoryId = await db.insertCategory({ name: 'food', cat_group: categoryGroupId, }); - let krogerId = await db.insertPayee({ name: 'kroger' }); - let lowesId = await db.insertPayee({ name: 'lowes', category: categoryId }); + const krogerId = await 
db.insertPayee({ name: 'kroger' }); + const lowesId = await db.insertPayee({ + name: 'lowes', + category: categoryId, + }); await db.insertTransaction({ id: '1', @@ -803,9 +806,9 @@ describe('Learning categories', () => { await updateCategoryRules([{ ...trans, id: 'three' }]); expect(getRules()).toMatchSnapshot(); - let rules = getRules(); - let getPayees = cat => { - let arr = rules + const rules = getRules(); + const getPayees = cat => { + const arr = rules .filter(rule => rule.actions[0].value === cat) .map(r => r.conditions.map(c => c.value)); return Array.prototype.concat.apply([], arr); @@ -823,7 +826,7 @@ describe('Learning categories', () => { await loadData(); expect(getRules().length).toBe(0); - let trans = { + const trans = { date: '2016-12-01', account: 'acct', payee: null, @@ -853,7 +856,7 @@ describe('Learning categories', () => { }); expect(getRules().length).toBe(2); - let trans = { + const trans = { date: '2016-12-01', account: 'acct', payee: null, @@ -866,7 +869,7 @@ describe('Learning categories', () => { // This should not have changed the category! This is tested // because this was a bug when rules were released - let rules = getRules(); + const rules = getRules(); expect(rules.length).toBe(2); expect(rules[0].actions[0].value).toBe('unknown1'); expect(rules[1].actions[0].value).toBe('unknown1'); @@ -887,7 +890,7 @@ describe('Learning categories', () => { // Internally, it should still be stored with the internal names // so that it's backwards compatible - let rawRule = await db.first('SELECT * FROM rules'); + const rawRule = await db.first('SELECT * FROM rules'); rawRule.conditions = JSON.parse(rawRule.conditions); rawRule.actions = JSON.parse(rawRule.actions); expect(rawRule.conditions[0].field).toBe('imported_description'); @@ -917,13 +920,13 @@ describe('Learning categories', () => { // This rule internally has been stored with the public names. 
// Making this work now allows us to switch to it by default in // the future - let rawRule = await db.first('SELECT * FROM rules'); + const rawRule = await db.first('SELECT * FROM rules'); rawRule.conditions = JSON.parse(rawRule.conditions); rawRule.actions = JSON.parse(rawRule.actions); expect(rawRule.conditions[0].field).toBe('imported_payee'); expect(rawRule.actions[0].field).toBe('payee'); - let [rule] = getRules(); + const [rule] = getRules(); expect(rule.conditions[0].field).toBe('imported_payee'); expect(rule.actions[0].field).toBe('payee'); }); diff --git a/packages/loot-core/src/server/accounts/transaction-rules.ts b/packages/loot-core/src/server/accounts/transaction-rules.ts index 4761375a32a..21d6db8a4bf 100644 --- a/packages/loot-core/src/server/accounts/transaction-rules.ts +++ b/packages/loot-core/src/server/accounts/transaction-rules.ts @@ -61,8 +61,8 @@ function invert(obj) { ); } -let internalFields = schemaConfig.views.transactions.fields; -let publicFields = invert(schemaConfig.views.transactions.fields); +const internalFields = schemaConfig.views.transactions.fields; +const publicFields = invert(schemaConfig.views.transactions.fields); function fromInternalField(obj: T): T { return { @@ -123,7 +123,7 @@ export const ruleModel = { }, toJS(row) { - let { conditions, conditions_op, actions, ...fields } = row; + const { conditions, conditions_op, actions, ...fields } = row; return { ...fields, conditionsOp: conditions_op, @@ -133,7 +133,7 @@ export const ruleModel = { }, fromJS(rule) { - let { conditions, conditionsOp, actions, ...row } = rule; + const { conditions, conditionsOp, actions, ...row } = rule; if (conditionsOp) { row.conditions_op = conditionsOp; } @@ -173,19 +173,19 @@ export function makeRule(data) { export async function loadRules() { resetState(); - let rules = await db.all(` + const rules = await db.all(` SELECT * FROM rules WHERE conditions IS NOT NULL AND actions IS NOT NULL AND tombstone = 0 `); for (let i = 0; i < rules.length; i++) { - let desc = rules[i]; + const desc = rules[i]; // These are old stages, can be removed before release if (desc.stage === 'cleanup' || desc.stage === 'modify') { desc.stage = 'pre'; } - let rule = makeRule(desc); + const rule = makeRule(desc); if (rule) { allRules.set(rule.id, rule); firstcharIndexer.index(rule); @@ -217,7 +217,7 @@ export async function updateRule(rule) { } export async function deleteRule(rule: T) { - let schedule = await db.first('SELECT id FROM schedules WHERE rule = ?', [ + const schedule = await db.first('SELECT id FROM schedules WHERE rule = ?', [ rule.id, ]); @@ -234,11 +234,11 @@ function onApplySync(oldValues, newValues) { newValues.forEach((items, table) => { if (table === 'rules') { items.forEach(newValue => { - let oldRule = allRules.get(newValue.id); + const oldRule = allRules.get(newValue.id); if (newValue.tombstone === 1) { // Deleted, need to remove it from in-memory - let rule = allRules.get(newValue.id); + const rule = allRules.get(newValue.id); if (rule) { allRules.delete(rule.getId()); firstcharIndexer.remove(rule); @@ -246,7 +246,7 @@ function onApplySync(oldValues, newValues) { } } else { // Inserted/updated - let rule = makeRule(newValue); + const rule = makeRule(newValue); if (rule) { if (oldRule) { firstcharIndexer.remove(oldRule); @@ -263,7 +263,7 @@ function onApplySync(oldValues, newValues) { // If any of the mapping tables have changed, we need to refresh the // ids - let tables = [...newValues.keys()]; + const tables = [...newValues.keys()]; if (tables.find(table => 
table.indexOf('mapping') !== -1)) { getRules().forEach(rule => { migrateIds(rule, getMappings()); @@ -275,7 +275,7 @@ function onApplySync(oldValues, newValues) { export function runRules(trans) { let finalTrans = { ...trans }; - let rules = rankRules( + const rules = rankRules( fastSetMerge( firstcharIndexer.getApplicableRules(trans), payeeIndexer.getApplicableRules(trans), @@ -291,7 +291,7 @@ export function runRules(trans) { // This does the inverse: finds all the transactions matching a rule export function conditionsToAQL(conditions, { recurDateBounds = 100 } = {}) { - let errors = []; + const errors = []; conditions = conditions .map(cond => { @@ -316,17 +316,17 @@ export function conditionsToAQL(conditions, { recurDateBounds = 100 } = {}) { .filter(Boolean); // rule -> actualql - let filters = conditions.map(cond => { - let { type, field, op, value, options } = cond; + const filters = conditions.map(cond => { + const { type, field, op, value, options } = cond; - let getValue = value => { + const getValue = value => { if (type === 'number') { return value.value; } return value; }; - let apply = (field, op, value) => { + const apply = (field, op, value) => { if (type === 'number') { if (options) { if (options.outflow) { @@ -357,7 +357,7 @@ export function conditionsToAQL(conditions, { recurDateBounds = 100 } = {}) { case 'is': if (type === 'date') { if (value.type === 'recur') { - let dates = value.schedule + const dates = value.schedule .occurrences({ take: recurDateBounds }) .toArray() .map(d => dayFromDate(d.date)); @@ -377,9 +377,9 @@ export function conditionsToAQL(conditions, { recurDateBounds = 100 } = {}) { }; } else { if (op === 'isapprox') { - let fullDate = parseDate(value.date); - let high = addDays(fullDate, 2); - let low = subDays(fullDate, 2); + const fullDate = parseDate(value.date); + const high = addDays(fullDate, 2); + const low = subDays(fullDate, 2); return { $and: [{ date: { $gte: low } }, { date: { $lte: high } }], @@ -389,15 +389,15 @@ export function conditionsToAQL(conditions, { recurDateBounds = 100 } = {}) { case 'date': return { date: value.date }; case 'month': { - let low = value.date + '-00'; - let high = value.date + '-99'; + const low = value.date + '-00'; + const high = value.date + '-99'; return { $and: [{ date: { $gte: low } }, { date: { $lte: high } }], }; } case 'year': { - let low = value.date + '-00-00'; - let high = value.date + '-99-99'; + const low = value.date + '-00-00'; + const high = value.date + '-99-99'; return { $and: [{ date: { $gte: low } }, { date: { $lte: high } }], }; @@ -407,9 +407,9 @@ export function conditionsToAQL(conditions, { recurDateBounds = 100 } = {}) { } } } else if (type === 'number') { - let number = value.value; + const number = value.value; if (op === 'isapprox') { - let threshold = getApproxNumberThreshold(number); + const threshold = getApproxNumberThreshold(number); return { $and: [ @@ -433,7 +433,7 @@ export function conditionsToAQL(conditions, { recurDateBounds = 100 } = {}) { case 'isbetween': // This operator is only applicable to the specific `between` // number type so we don't use `apply` - let [low, high] = sortNumbers(value.num1, value.num2); + const [low, high] = sortNumbers(value.num1, value.num2); return { [field]: [{ $gte: low }, { $lte: high }], }; @@ -454,14 +454,14 @@ export function conditionsToAQL(conditions, { recurDateBounds = 100 } = {}) { '%' + value + '%', ); case 'oneOf': - let values = value; + const values = value; if (values.length === 0) { // This forces it to match nothing return { 
id: null }; } return { $or: values.map(v => apply(field, '$eq', v)) }; case 'notOneOf': - let notValues = value; + const notValues = value; if (notValues.length === 0) { // This forces it to match nothing return { id: null }; @@ -491,7 +491,7 @@ export function applyActions( transactionIds: string[], actions: Array, ) { - let parsedActions = actions + const parsedActions = actions .map(action => { if (action instanceof Action) { return action; @@ -521,9 +521,9 @@ export function applyActions( return null; } - let updated = transactionIds.map(id => { - let update = { id }; - for (let action of parsedActions) { + const updated = transactionIds.map(id => { + const update = { id }; + for (const action of parsedActions) { action.exec(update); } return update; @@ -533,7 +533,7 @@ export function applyActions( } export function getRulesForPayee(payeeId) { - let rules = new Set(); + const rules = new Set(); iterateIds(getRules(), 'payee', (rule, id) => { if (id === payeeId) { rules.add(rule); @@ -549,9 +549,9 @@ function* getIsSetterRules( actionField, { condValue, actionValue }: { condValue?: string; actionValue?: string }, ) { - let rules = getRules(); + const rules = getRules(); for (let i = 0; i < rules.length; i++) { - let rule = rules[i]; + const rule = rules[i]; if ( rule.stage === stage && @@ -577,9 +577,9 @@ function* getOneOfSetterRules( actionField, { condValue, actionValue }: { condValue?: string; actionValue: string }, ) { - let rules = getRules(); + const rules = getRules(); for (let i = 0; i < rules.length; i++) { - let rule = rules[i]; + const rule = rules[i]; if ( rule.stage === stage && @@ -601,7 +601,7 @@ function* getOneOfSetterRules( } export async function updatePayeeRenameRule(fromNames: string[], to: string) { - let renameRule = getOneOfSetterRules('pre', 'imported_payee', 'payee', { + const renameRule = getOneOfSetterRules('pre', 'imported_payee', 'payee', { actionValue: to, }).next().value; @@ -612,21 +612,21 @@ export async function updatePayeeRenameRule(fromNames: string[], to: string) { // case we could improve in the future, but this is fine for now. 
if (renameRule) { - let condition = renameRule.conditions[0]; - let newValue = [ + const condition = renameRule.conditions[0]; + const newValue = [ ...fastSetMerge( new Set(condition.value), new Set(fromNames.filter(name => name !== '')), ), ]; - let rule = { + const rule = { ...renameRule, conditions: [{ ...condition, value: newValue }], }; await updateRule(rule); return renameRule.id; } else { - let rule = new Rule({ + const rule = new Rule({ stage: 'pre', conditionsOp: 'and', conditions: [{ op: 'oneOf', field: 'imported_payee', value: fromNames }], @@ -638,7 +638,7 @@ export async function updatePayeeRenameRule(fromNames: string[], to: string) { } export function getProbableCategory(transactions) { - let scores = new Map(); + const scores = new Map(); transactions.forEach(trans => { if (trans.category) { @@ -646,8 +646,8 @@ export function getProbableCategory(transactions) { } }); - let winner = transactions.reduce((winner, trans) => { - let score = scores.get(trans.category); + const winner = transactions.reduce((winner, trans) => { + const score = scores.get(trans.category); if (!winner || score > winner.score) { return { score, category: trans.category }; } @@ -662,8 +662,8 @@ export async function updateCategoryRules(transactions) { return; } - let payeeIds = new Set(transactions.map(trans => trans.payee)); - let transIds = new Set(transactions.map(trans => trans.id)); + const payeeIds = new Set(transactions.map(trans => trans.payee)); + const transIds = new Set(transactions.map(trans => trans.id)); // It's going to be quickest to get the oldest date and then query // all transactions since then so we can work in memory @@ -682,7 +682,7 @@ export async function updateCategoryRules(transactions) { // Also look 180 days in the future to get any future transactions // (this might change when we think about scheduled transactions) - let register = await db.all( + const register = await db.all( `SELECT t.* FROM v_transactions t LEFT JOIN accounts a ON a.id = t.account WHERE date >= ? AND date <= ? AND is_parent = 0 AND a.closed = 0 @@ -690,18 +690,18 @@ export async function updateCategoryRules(transactions) { [toDateRepr(oldestDate), toDateRepr(addDays(currentDay(), 180))], ); - let allTransactions = partitionByField(register, 'payee'); - let categoriesToSet = new Map(); + const allTransactions = partitionByField(register, 'payee'); + const categoriesToSet = new Map(); - for (let payeeId of payeeIds) { + for (const payeeId of payeeIds) { // Don't do anything if payee is null if (payeeId) { - let latestTrans = (allTransactions.get(payeeId) || []).slice(0, 5); + const latestTrans = (allTransactions.get(payeeId) || []).slice(0, 5); // Check if one of the latest transactions was one that was // updated. We only want to update anything if so. if (latestTrans.find(trans => transIds.has(trans.id))) { - let category = getProbableCategory(latestTrans); + const category = getProbableCategory(latestTrans); if (category) { categoriesToSet.set(payeeId, category); } @@ -710,8 +710,8 @@ export async function updateCategoryRules(transactions) { } await batchMessages(async () => { - for (let [payeeId, category] of categoriesToSet.entries()) { - let ruleSetters = [ + for (const [payeeId, category] of categoriesToSet.entries()) { + const ruleSetters = [ ...getIsSetterRules(null, 'payee', 'category', { condValue: payeeId, }), @@ -724,8 +724,8 @@ export async function updateCategoryRules(transactions) { // because 2 clients made them independently. 
Not really a big // deal, but to make sure our update gets applied set it to // all of them - for (let rule of ruleSetters) { - let action = rule.actions[0]; + for (const rule of ruleSetters) { + const action = rule.actions[0]; if (action.value !== category) { await updateRule({ ...rule, @@ -735,7 +735,7 @@ export async function updateCategoryRules(transactions) { } } else { // No existing rules, so create one - let newRule = new Rule({ + const newRule = new Rule({ stage: null, conditionsOp: 'and', conditions: [{ op: 'is', field: 'payee', value: payeeId }], diff --git a/packages/loot-core/src/server/accounts/transactions.ts b/packages/loot-core/src/server/accounts/transactions.ts index 30f4ea94ae9..285fea2cbc1 100644 --- a/packages/loot-core/src/server/accounts/transactions.ts +++ b/packages/loot-core/src/server/accounts/transactions.ts @@ -8,12 +8,12 @@ import * as rules from './transaction-rules'; import * as transfer from './transfer'; async function idsWithChildren(ids: string[]) { - let whereIds = whereIn(ids, 'parent_id'); - let rows = await db.all( + const whereIds = whereIn(ids, 'parent_id'); + const rows = await db.all( `SELECT id FROM v_transactions_internal WHERE ${whereIds}`, ); - let set = new Set(ids); - for (let row of rows) { + const set = new Set(ids); + for (const row of rows) { set.add(row.id); } return [...set]; @@ -56,20 +56,22 @@ export async function batchUpdateTransactions({ }) { // Track the ids of each type of transaction change (see below for why) let addedIds = []; - let updatedIds = updated ? updated.map(u => u.id) : []; - let deletedIds = deleted ? await idsWithChildren(deleted.map(d => d.id)) : []; + const updatedIds = updated ? updated.map(u => u.id) : []; + const deletedIds = deleted + ? await idsWithChildren(deleted.map(d => d.id)) + : []; - let oldPayees = new Set(); - let accounts = await db.all('SELECT * FROM accounts WHERE tombstone = 0'); + const oldPayees = new Set(); + const accounts = await db.all('SELECT * FROM accounts WHERE tombstone = 0'); // We need to get all the payees of updated transactions _before_ // making changes if (updated) { - let descUpdatedIds = updated + const descUpdatedIds = updated .filter(update => update.payee) .map(update => update.id); - let transactions = await getTransactionsByIds(descUpdatedIds); + const transactions = await getTransactionsByIds(descUpdatedIds); for (let i = 0; i < transactions.length; i++) { oldPayees.add(transactions[i].payee); @@ -103,7 +105,7 @@ export async function batchUpdateTransactions({ if (t.account) { // Moving transactions off budget should always clear the // category - let account = accounts.find(acct => acct.id === t.account); + const account = accounts.find(acct => acct.id === t.account); if (account.offbudget === 1) { t.category = null; } @@ -119,16 +121,16 @@ export async function batchUpdateTransactions({ // needed to run any cascading logic that depends on the full // transaction. Things like transfers, analyzing rule updates, and // more - let allAdded = await getTransactionsByIds(addedIds); - let allUpdated = await getTransactionsByIds(updatedIds); - let allDeleted = await getTransactionsByIds(deletedIds); + const allAdded = await getTransactionsByIds(addedIds); + const allUpdated = await getTransactionsByIds(updatedIds); + const allDeleted = await getTransactionsByIds(deletedIds); // Post-processing phase: first do any updates to transfers. // Transfers update the transactions and we need to return updates // to the client so that can apply them. 
Note that added // transactions just return the full transaction. - let resultAdded = allAdded; - let resultUpdated = allUpdated; + const resultAdded = allAdded; + const resultUpdated = allUpdated; let transfersUpdated: Awaited>[]; if (runTransfers) { @@ -147,7 +149,7 @@ export async function batchUpdateTransactions({ if (learnCategories) { // Analyze any updated categories and update rules to learn from // the user's activity - let ids = new Set([ + const ids = new Set([ ...(added ? added.filter(add => add.category).map(add => add.id) : []), ...(updated ? updated.filter(update => update.category).map(update => update.id) @@ -163,11 +165,11 @@ export async function batchUpdateTransactions({ // them if (updated) { - let newPayeeIds = updated.map(u => u.payee).filter(Boolean); + const newPayeeIds = updated.map(u => u.payee).filter(Boolean); if (newPayeeIds.length > 0) { - let allOrphaned = new Set(await db.getOrphanedPayees()); + const allOrphaned = new Set(await db.getOrphanedPayees()); - let orphanedIds = [...oldPayees].filter(id => allOrphaned.has(id)); + const orphanedIds = [...oldPayees].filter(id => allOrphaned.has(id)); if (orphanedIds.length > 0) { connection.send('orphaned-payees', { diff --git a/packages/loot-core/src/server/accounts/transfer.test.ts b/packages/loot-core/src/server/accounts/transfer.test.ts index 5788a301c7f..f569098b3f5 100644 --- a/packages/loot-core/src/server/accounts/transfer.test.ts +++ b/packages/loot-core/src/server/accounts/transfer.test.ts @@ -58,12 +58,12 @@ describe('Transfer', () => { await db.insertTransaction(transaction); await transfer.onInsert(transaction); - let differ = expectSnapshotWithDiffer(await getAllTransactions()); + const differ = expectSnapshotWithDiffer(await getAllTransactions()); - let transferTwo = await db.first( + const transferTwo = await db.first( "SELECT * FROM payees WHERE transfer_acct = 'two'", ); - let transferThree = await db.first( + const transferThree = await db.first( "SELECT * FROM payees WHERE transfer_acct = 'three'", ); @@ -130,10 +130,10 @@ describe('Transfer', () => { test('transfers are properly de-categorized', async () => { await prepareDatabase(); - let transferTwo = await db.first( + const transferTwo = await db.first( "SELECT * FROM payees WHERE transfer_acct = 'two'", ); - let transferThree = await db.first( + const transferThree = await db.first( "SELECT * FROM payees WHERE transfer_acct = 'three'", ); @@ -147,7 +147,7 @@ describe('Transfer', () => { transaction.id = await db.insertTransaction(transaction); await transfer.onInsert(transaction); - let differ = expectSnapshotWithDiffer(await getAllTransactions()); + const differ = expectSnapshotWithDiffer(await getAllTransactions()); transaction = { ...(await db.getTransaction(transaction.id)), diff --git a/packages/loot-core/src/server/accounts/transfer.ts b/packages/loot-core/src/server/accounts/transfer.ts index 8d4e0d84f8d..a9b31fc858f 100644 --- a/packages/loot-core/src/server/accounts/transfer.ts +++ b/packages/loot-core/src/server/accounts/transfer.ts @@ -6,7 +6,7 @@ async function getPayee(acct) { async function getTransferredAccount(transaction) { if (transaction.payee) { - let { transfer_acct } = await db.first( + const { transfer_acct } = await db.first( 'SELECT id, transfer_acct FROM v_payees WHERE id = ?', [transaction.payee], ); @@ -44,14 +44,14 @@ export async function addTransfer(transaction, transferredAccount) { return null; } - let { id: fromPayee } = await db.first( + const { id: fromPayee } = await db.first( 'SELECT id FROM payees 
WHERE transfer_acct = ?', [transaction.account], ); // We need to enforce certain constraints with child transaction transfers if (transaction.parent_id) { - let row = await db.first( + const row = await db.first( ` SELECT p.id, p.transfer_acct FROM v_transactions t LEFT JOIN payees p ON p.id = t.payee @@ -93,7 +93,7 @@ export async function addTransfer(transaction, transferredAccount) { } export async function removeTransfer(transaction) { - let transferTrans = await db.getTransaction(transaction.transfer_id); + const transferTrans = await db.getTransaction(transaction.transfer_id); // Perform operations on the transfer transaction only // if it is found. For example: when users delete both @@ -118,7 +118,7 @@ export async function removeTransfer(transaction) { } export async function updateTransfer(transaction, transferredAccount) { - let payee = await getPayee(transaction.account); + const payee = await getPayee(transaction.account); await db.updateTransaction({ id: transaction.transfer_id, @@ -138,7 +138,7 @@ export async function updateTransfer(transaction, transferredAccount) { } export async function onInsert(transaction) { - let transferredAccount = await getTransferredAccount(transaction); + const transferredAccount = await getTransferredAccount(transaction); if (transferredAccount) { return addTransfer(transaction, transferredAccount); diff --git a/packages/loot-core/src/server/api-models.ts b/packages/loot-core/src/server/api-models.ts index 53a4ae52fc9..04bdc76f417 100644 --- a/packages/loot-core/src/server/api-models.ts +++ b/packages/loot-core/src/server/api-models.ts @@ -13,7 +13,7 @@ export const accountModel = { }, fromExternal(account) { - let result = { ...account }; + const result = { ...account }; if ('offbudget' in account) { result.offbudget = account.offbudget ? 1 : 0; } @@ -37,7 +37,7 @@ export const categoryModel = { }, fromExternal(category) { - let { group_id: _, ...result } = category; + const { group_id: _, ...result } = category; if ('is_income' in category) { result.is_income = category.is_income ? 1 : 0; } @@ -61,7 +61,7 @@ export const categoryGroupModel = { }, fromExternal(group) { - let result = { ...group }; + const result = { ...group }; if ('is_income' in group) { result.is_income = group.is_income ? 
1 : 0; } diff --git a/packages/loot-core/src/server/api.ts b/packages/loot-core/src/server/api.ts index eee5f28c14f..29288bdc8d6 100644 --- a/packages/loot-core/src/server/api.ts +++ b/packages/loot-core/src/server/api.ts @@ -47,10 +47,10 @@ function withMutation(handler) { return args => { return runMutator( async () => { - let latestTimestamp = getClock().timestamp.toString(); - let result = await handler(args); + const latestTimestamp = getClock().timestamp.toString(); + const result = await handler(args); - let rows = await db.all( + const rows = await db.all( 'SELECT DISTINCT dataset FROM messages_crdt WHERE timestamp > ?', [latestTimestamp], ); @@ -78,8 +78,8 @@ async function validateMonth(month) { } if (!IMPORT_MODE) { - let { start, end } = await handlers['get-budget-bounds'](); - let range = monthUtils.range(start, end); + const { start, end } = await handlers['get-budget-bounds'](); + const range = monthUtils.range(start, end); if (!range.includes(month)) { throw APIError('No budget exists for month: ' + month); } @@ -91,7 +91,7 @@ async function validateExpenseCategory(debug, id) { throw APIError(`${debug}: category id is required`); } - let row = await db.first('SELECT is_income FROM categories WHERE id = ?', [ + const row = await db.first('SELECT is_income FROM categories WHERE id = ?', [ id, ]); @@ -145,11 +145,11 @@ handlers['api/batch-budget-end'] = async function () { }; handlers['api/load-budget'] = async function ({ id }) { - let { id: currentId } = prefs.getPrefs() || {}; + const { id: currentId } = prefs.getPrefs() || {}; if (currentId !== id) { connection.send('start-load'); - let { error } = await handlers['load-budget']({ id }); + const { error } = await handlers['load-budget']({ id }); if (!error) { connection.send('finish-load'); @@ -162,26 +162,26 @@ handlers['api/load-budget'] = async function ({ id }) { }; handlers['api/download-budget'] = async function ({ syncId, password }) { - let { id: currentId } = prefs.getPrefs() || {}; + const { id: currentId } = prefs.getPrefs() || {}; if (currentId) { await handlers['close-budget'](); } - let localBudget = (await handlers['get-budgets']()).find( + const localBudget = (await handlers['get-budgets']()).find( b => b.groupId === syncId, ); if (localBudget) { await handlers['load-budget']({ id: localBudget.id }); - let result = await handlers['sync-budget'](); + const result = await handlers['sync-budget'](); if (result.error) { throw new Error(getSyncError(result.error, localBudget.id)); } } else { - let files = await handlers['get-remote-files'](); + const files = await handlers['get-remote-files'](); if (!files) { throw new Error('Could not get remote files'); } - let file = files.find(f => f.groupId === syncId); + const file = files.find(f => f.groupId === syncId); if (!file) { throw new Error( `Budget “${syncId}” not found. 
Check the sync id of your budget in the Advanced section of the settings page.`, @@ -193,7 +193,7 @@ handlers['api/download-budget'] = async function ({ syncId, password }) { ); } if (password) { - let result = await handlers['key-test']({ + const result = await handlers['key-test']({ fileId: file.fileId, password, }); @@ -202,7 +202,7 @@ handlers['api/download-budget'] = async function ({ syncId, password }) { } } - let result = await handlers['download-budget']({ fileId: file.fileId }); + const result = await handlers['download-budget']({ fileId: file.fileId }); if (result.error) { console.log('Full error details', result.error); throw new Error(getDownloadError(result.error)); @@ -212,8 +212,8 @@ handlers['api/download-budget'] = async function ({ syncId, password }) { }; handlers['api/sync'] = async function () { - let { id } = prefs.getPrefs(); - let result = await handlers['sync-budget'](); + const { id } = prefs.getPrefs(); + const result = await handlers['sync-budget'](); if (result.error) { throw new Error(getSyncError(result.error, id)); } @@ -245,7 +245,7 @@ handlers['api/finish-import'] = async function () { // We always need to fully reload the app. Importing doesn't touch // the spreadsheet, but we can't just recreate the spreadsheet // either; there is other internal state that isn't created - let { id } = prefs.getPrefs(); + const { id } = prefs.getPrefs(); await handlers['close-budget'](); await handlers['load-budget']({ id }); @@ -262,7 +262,7 @@ handlers['api/abort-import'] = async function () { if (IMPORT_MODE) { checkFileOpen(); - let { id } = prefs.getPrefs(); + const { id } = prefs.getPrefs(); await handlers['close-budget'](); await handlers['delete-budget']({ id }); @@ -279,7 +279,7 @@ handlers['api/query'] = async function ({ query }) { handlers['api/budget-months'] = async function () { checkFileOpen(); - let { start, end } = await handlers['get-budget-bounds'](); + const { start, end } = await handlers['get-budget-bounds'](); return monthUtils.range(start, end); }; @@ -287,11 +287,11 @@ handlers['api/budget-month'] = async function ({ month }) { checkFileOpen(); await validateMonth(month); - let groups = await db.getCategoriesGrouped(); - let sheetName = monthUtils.sheetForMonth(month); + const groups = await db.getCategoriesGrouped(); + const sheetName = monthUtils.sheetForMonth(month); function value(name) { - let v = sheet.get().getCellValue(sheetName, name); + const v = sheet.get().getCellValue(sheetName, name); return v === '' ? 
0 : v; } @@ -410,7 +410,7 @@ handlers['api/transactions-get'] = async function ({ endDate, }) { checkFileOpen(); - let { data } = await aqlQuery( + const { data } = await aqlQuery( q('transactions') .filter({ $and: [ @@ -434,37 +434,37 @@ handlers['api/transaction-update'] = withMutation(async function ({ fields, }) { checkFileOpen(); - let { data } = await aqlQuery( + const { data } = await aqlQuery( q('transactions').filter({ id }).select('*').options({ splits: 'grouped' }), ); - let transactions = ungroupTransactions(data); + const transactions = ungroupTransactions(data); if (transactions.length === 0) { return []; } - let { diff } = updateTransaction(transactions, fields); + const { diff } = updateTransaction(transactions, fields); return handlers['transactions-batch-update'](diff); }); handlers['api/transaction-delete'] = withMutation(async function ({ id }) { checkFileOpen(); - let { data } = await aqlQuery( + const { data } = await aqlQuery( q('transactions').filter({ id }).select('*').options({ splits: 'grouped' }), ); - let transactions = ungroupTransactions(data); + const transactions = ungroupTransactions(data); if (transactions.length === 0) { return []; } - let { diff } = deleteTransaction(transactions, id); + const { diff } = deleteTransaction(transactions, id); return handlers['transactions-batch-update'](diff); }); handlers['api/accounts-get'] = async function () { checkFileOpen(); - let accounts = await db.getAccounts(); + const accounts = await db.getAccounts(); return accounts.map(account => accountModel.toExternal(account)); }; @@ -515,7 +515,7 @@ handlers['api/categories-get'] = async function ({ grouped, }: { grouped? } = {}) { checkFileOpen(); - let result = await handlers['get-categories'](); + const result = await handlers['get-categories'](); return grouped ? 
result.grouped.map(categoryGroupModel.toExternal) : result.list.map(categoryModel.toExternal); @@ -580,7 +580,7 @@ handlers['api/category-delete'] = withMutation(async function ({ handlers['api/payees-get'] = async function () { checkFileOpen(); - let payees = await handlers['payees-get'](); + const payees = await handlers['payees-get'](); return payees.map(payeeModel.toExternal); }; @@ -602,7 +602,7 @@ handlers['api/payee-delete'] = withMutation(async function ({ id }) { }); export default function installAPI(serverHandlers: ServerHandlers) { - let merged = Object.assign({}, serverHandlers, handlers); + const merged = Object.assign({}, serverHandlers, handlers); handlers = merged as Handlers; return merged; } diff --git a/packages/loot-core/src/server/app.ts b/packages/loot-core/src/server/app.ts index e52ffa2940a..a4de8ecdb80 100644 --- a/packages/loot-core/src/server/app.ts +++ b/packages/loot-core/src/server/app.ts @@ -37,7 +37,7 @@ class App { } combine(...apps) { - for (let app of apps) { + for (const app of apps) { Object.keys(app.handlers).forEach(name => { this.method(name as string & keyof Handlers, app.handlers[name]); }); @@ -46,8 +46,8 @@ class App { this.service(service); }); - for (let [name, listeners] of app.events.all.entries()) { - for (let listener of listeners) { + for (const [name, listeners] of app.events.all.entries()) { + for (const listener of listeners) { this.events.on(name, listener); } } diff --git a/packages/loot-core/src/server/aql/compiler.test.ts b/packages/loot-core/src/server/aql/compiler.test.ts index f7f861783bc..42712cb9aca 100644 --- a/packages/loot-core/src/server/aql/compiler.test.ts +++ b/packages/loot-core/src/server/aql/compiler.test.ts @@ -9,7 +9,7 @@ function sqlLines(str) { .map(line => line.trim()); } -let basicSchema = { +const basicSchema = { transactions: { id: { type: 'id' }, date: { type: 'date' }, @@ -20,7 +20,7 @@ let basicSchema = { }, }; -let schemaWithRefs = { +const schemaWithRefs = { transactions: { id: { type: 'id' }, payee: { type: 'id', ref: 'payees' }, @@ -40,7 +40,7 @@ let schemaWithRefs = { }, }; -let schemaWithTombstone = { +const schemaWithTombstone = { transactions: { id: { type: 'id' }, payee: { type: 'id', ref: 'payees' }, @@ -123,7 +123,7 @@ describe('sheet language', () => { }); it('`select` allows nested functions', () => { - let result = generateSQLWithState( + const result = generateSQLWithState( query('transactions') .select([{ num: { $idiv: [{ $neg: '$amount' }, 2] } }]) .serialize(), @@ -302,7 +302,7 @@ describe('sheet language', () => { }); it('avoids unnecessary joins when deeply joining', () => { - let { state, sql } = generateSQLWithState( + const { state, sql } = generateSQLWithState( query('transactions') .filter({ 'payee.account.trans1.amount': 1, @@ -631,7 +631,7 @@ describe('sheet language', () => { .serialize(), schemaWithRefs, ); - let monthParam = result.state.namedParameters.find( + const monthParam = result.state.namedParameters.find( p => p.paramName === 'month', ); expect(monthParam.paramType).toBe('date-month'); @@ -693,7 +693,7 @@ describe('sheet language', () => { }); it('raw mode avoids any internal filters', () => { - let result = generateSQLWithState( + const result = generateSQLWithState( query('transactions').select(['amount']).raw().serialize(), schemaWithRefs, { @@ -781,7 +781,7 @@ describe('sheet language', () => { }); it('$oneof creates template for executor to run', () => { - let result = generateSQLWithState( + const result = generateSQLWithState( query('transactions') .filter({ 
id: { $oneof: ['one', 'two', 'three'] } }) .select(['amount']) diff --git a/packages/loot-core/src/server/aql/compiler.ts b/packages/loot-core/src/server/aql/compiler.ts index 2610939a515..e74882be6dd 100644 --- a/packages/loot-core/src/server/aql/compiler.ts +++ b/packages/loot-core/src/server/aql/compiler.ts @@ -11,7 +11,7 @@ function uid(tableName) { class CompileError extends Error {} function nativeDateToInt(date) { - let pad = x => (x < 10 ? '0' : '') + x; + const pad = x => (x < 10 ? '0' : '') + x; return date.getFullYear() + pad(date.getMonth() + 1) + pad(date.getDate()); } @@ -20,12 +20,12 @@ function dateToInt(date) { } function addTombstone(schema, tableName, tableId, whereStr) { - let hasTombstone = schema[tableName].tombstone != null; + const hasTombstone = schema[tableName].tombstone != null; return hasTombstone ? `${whereStr} AND ${tableId}.tombstone = 0` : whereStr; } function popPath(path) { - let parts = path.split('.'); + const parts = path.split('.'); return { path: parts.slice(0, -1).join('.'), field: parts[parts.length - 1] }; } @@ -47,7 +47,7 @@ function getFieldDescription(schema, tableName, field) { throw new CompileError(`Table “${tableName}” does not exist in the schema`); } - let fieldDesc = schema[tableName][field]; + const fieldDesc = schema[tableName][field]; if (fieldDesc == null) { throw new CompileError( `Field “${field}” does not exist in table “${tableName}”`, @@ -57,17 +57,17 @@ function getFieldDescription(schema, tableName, field) { } function makePath(state, path) { - let { schema, paths } = state; + const { schema, paths } = state; - let parts = path.split('.'); + const parts = path.split('.'); if (parts.length < 2) { throw new CompileError('Invalid path: ' + path); } - let initialTable = parts[0]; + const initialTable = parts[0]; - let tableName = parts.slice(1).reduce((tableName, field) => { - let table = schema[tableName]; + const tableName = parts.slice(1).reduce((tableName, field) => { + const table = schema[tableName]; if (table == null) { throw new CompileError(`Path error: ${tableName} table does not exist`); @@ -83,12 +83,12 @@ function makePath(state, path) { }, initialTable); let joinTable; - let parentParts = parts.slice(0, -1); + const parentParts = parts.slice(0, -1); if (parentParts.length === 1) { joinTable = parentParts[0]; } else { - let parentPath = parentParts.join('.'); - let parentDesc = paths.get(parentPath); + const parentPath = parentParts.join('.'); + const parentDesc = paths.get(parentPath); if (!parentDesc) { throw new CompileError('Path does not exist: ' + parentPath); } @@ -108,7 +108,7 @@ function resolvePath(state, path) { paths = paths.reduce( (acc, name) => { - let fullName = acc.context + '.' + name; + const fullName = acc.context + '.' 
+ name; return { context: fullName, path: [...acc.path, fullName], @@ -123,7 +123,7 @@ function resolvePath(state, path) { } }); - let pathInfo = state.paths.get(paths[paths.length - 1]); + const pathInfo = state.paths.get(paths[paths.length - 1]); return pathInfo; } @@ -132,8 +132,9 @@ function transformField(state, name) { throw new CompileError('Invalid field name, must be a string'); } - let { path, field } = popPath(name); + const { path, field: originalField } = popPath(name); + let field = originalField; let pathInfo; if (path === '') { pathInfo = { @@ -144,7 +145,11 @@ function transformField(state, name) { pathInfo = resolvePath(state, path); } - let fieldDesc = getFieldDescription(state.schema, pathInfo.tableName, field); + const fieldDesc = getFieldDescription( + state.schema, + pathInfo.tableName, + field, + ); // If this is a field that references an item in another table, that // item could have been deleted. If that's the case, we want to @@ -158,7 +163,7 @@ function transformField(state, name) { fieldDesc.type === 'id' && field !== 'id' ) { - let refPath = state.implicitTableName + '.' + name; + const refPath = state.implicitTableName + '.' + name; let refPathInfo = state.paths.get(refPath); if (!refPathInfo) { @@ -171,12 +176,12 @@ function transformField(state, name) { pathInfo = refPathInfo; } - let fieldStr = pathInfo.tableId + '.' + field; + const fieldStr = pathInfo.tableId + '.' + field; return typed(fieldStr, fieldDesc.type); } function parseDate(str) { - let m = str.match(/^(\d{4}-\d{2}-\d{2})$/); + const m = str.match(/^(\d{4}-\d{2}-\d{2})$/); if (m) { return typed(dateToInt(m[1]), 'date', { literal: true }); } @@ -184,7 +189,7 @@ function parseDate(str) { } function parseMonth(str) { - let m = str.match(/^(\d{4}-\d{2})$/); + const m = str.match(/^(\d{4}-\d{2})$/); if (m) { return typed(dateToInt(m[1]), 'date', { literal: true }); } @@ -192,7 +197,7 @@ function parseMonth(str) { } function parseYear(str) { - let m = str.match(/^(\d{4})$/); + const m = str.match(/^(\d{4})$/); if (m) { return typed(dateToInt(m[1]), 'date', { literal: true }); } @@ -204,9 +209,9 @@ function badDateFormat(str, type) { } function inferParam(param, type) { - let existingType = param.paramType; + const existingType = param.paramType; if (existingType) { - let casts = { + const casts = { date: ['string'], 'date-month': ['date'], 'date-year': ['date', 'date-month'], @@ -338,7 +343,7 @@ function val(state, expr, type?: string) { return `'${castedExpr.value}'`; } else if (castedExpr.type === 'string') { // Escape quotes - let value = castedExpr.value.replace(/'/g, "''"); + const value = castedExpr.value.replace(/'/g, "''"); return `'${value}'`; } /* eslint-enable rulesdir/typography */ @@ -376,7 +381,7 @@ function saveStack(type, func) { } state.compileStack.push({ type, args }); - let ret = func(state, ...args); + const ret = func(state, ...args); state.compileStack.pop(); return ret; }; @@ -389,9 +394,9 @@ function prettyValue(value) { return 'undefined'; } - let str = JSON.stringify(value); + const str = JSON.stringify(value); if (str.length > 70) { - let expanded = JSON.stringify(value, null, 2); + const expanded = JSON.stringify(value, null, 2); return expanded.split('\n').join('\n '); } return str; @@ -411,7 +416,7 @@ function getCompileError(error, stack) { case 'function': return prettyValue(entry.args[0]); case 'op': { - let [fieldRef, opData] = entry.args; + const [fieldRef, opData] = entry.args; return prettyValue({ [fieldRef]: opData }); } case 'value': @@ -474,7 +479,7 @@ const 
compileExpr = saveStack('expr', (state, expr) => { if (typeof expr === 'string') { // Field reference if (expr[0] === '$') { - let fieldRef = expr === '$' ? state.implicitField : expr.slice(1); + const fieldRef = expr === '$' ? state.implicitField : expr.slice(1); if (fieldRef == null || fieldRef === '') { throw new CompileError('Invalid field reference: ' + expr); @@ -485,7 +490,7 @@ const compileExpr = saveStack('expr', (state, expr) => { // Named parameter if (expr[0] === ':') { - let param = { value: '?', type: 'param', paramName: expr.slice(1) }; + const param = { value: '?', type: 'param', paramName: expr.slice(1) }; state.namedParameters.push(param); return param; } @@ -507,7 +512,7 @@ const compileExpr = saveStack('expr', (state, expr) => { }); const compileFunction = saveStack('function', (state, func) => { - let [name] = Object.keys(func); + const [name] = Object.keys(func); let argExprs = func[name]; if (!Array.isArray(argExprs)) { argExprs = [argExprs]; @@ -529,13 +534,13 @@ const compileFunction = saveStack('function', (state, func) => { // aggregate functions case '$sum': { validateArgLength(args, 1); - let [arg1] = valArray(state, args, ['float']); + const [arg1] = valArray(state, args, ['float']); return typed(`SUM(${arg1})`, args[0].type); } case '$sumOver': { - let [arg1] = valArray(state, args, ['float']); - let order = state.orders + const [arg1] = valArray(state, args, ['float']); + const order = state.orders ? 'ORDER BY ' + compileOrderBy(state, state.orders) : ''; @@ -547,14 +552,14 @@ const compileFunction = saveStack('function', (state, func) => { case '$count': { validateArgLength(args, 1); - let [arg1] = valArray(state, args); + const [arg1] = valArray(state, args); return typed(`COUNT(${arg1})`, 'integer'); } // string functions case '$substr': { validateArgLength(args, 2, 3); - let [arg1, arg2, arg3] = valArray(state, args, [ + const [arg1, arg2, arg3] = valArray(state, args, [ 'string', 'integer', 'integer', @@ -563,7 +568,7 @@ const compileFunction = saveStack('function', (state, func) => { } case '$lower': { validateArgLength(args, 1); - let [arg1] = valArray(state, args, ['string']); + const [arg1] = valArray(state, args, ['string']); return typed(`UNICODE_LOWER(${arg1})`, 'string'); } @@ -600,12 +605,12 @@ const compileFunction = saveStack('function', (state, func) => { // various functions case '$condition': validateArgLength(args, 1); - let conds = compileConditions(state, args[0]); + const conds = compileConditions(state, args[0]); return typed(conds.join(' AND '), 'boolean'); case '$nocase': validateArgLength(args, 1); - let [arg1] = valArray(state, args, ['string']); + const [arg1] = valArray(state, args, ['string']); return typed(`${arg1} COLLATE NOCASE`, args[0].type); case '$literal': { @@ -621,10 +626,10 @@ const compileFunction = saveStack('function', (state, func) => { }); const compileOp = saveStack('op', (state, fieldRef, opData) => { - let { $transform, ...opExpr } = opData; - let [op] = Object.keys(opExpr); + const { $transform, ...opExpr } = opData; + const [op] = Object.keys(opExpr); - let rhs = compileExpr(state, opData[op]); + const rhs = compileExpr(state, opData[op]); let lhs; if ($transform) { @@ -638,19 +643,19 @@ const compileOp = saveStack('op', (state, fieldRef, opData) => { switch (op) { case '$gte': { - let [left, right] = valArray(state, [lhs, rhs], [null, lhs.type]); + const [left, right] = valArray(state, [lhs, rhs], [null, lhs.type]); return `${left} >= ${right}`; } case '$lte': { - let [left, right] = valArray(state, [lhs, 
rhs], [null, lhs.type]); + const [left, right] = valArray(state, [lhs, rhs], [null, lhs.type]); return `${left} <= ${right}`; } case '$gt': { - let [left, right] = valArray(state, [lhs, rhs], [null, lhs.type]); + const [left, right] = valArray(state, [lhs, rhs], [null, lhs.type]); return `${left} > ${right}`; } case '$lt': { - let [left, right] = valArray(state, [lhs, rhs], [null, lhs.type]); + const [left, right] = valArray(state, [lhs, rhs], [null, lhs.type]); return `${left} < ${right}`; } case '$eq': { @@ -658,10 +663,10 @@ const compileOp = saveStack('op', (state, fieldRef, opData) => { return `${val(state, lhs)} IS NULL`; } - let [left, right] = valArray(state, [lhs, rhs], [null, lhs.type]); + const [left, right] = valArray(state, [lhs, rhs], [null, lhs.type]); if (rhs.type === 'param') { - let orders = state.namedParameters.map(param => { + const orders = state.namedParameters.map(param => { return param === rhs || param === lhs ? [param, { ...param }] : param; }); state.namedParameters = [].concat.apply([], orders); @@ -679,10 +684,10 @@ const compileOp = saveStack('op', (state, fieldRef, opData) => { return `${val(state, lhs)} IS NULL`; } - let [left, right] = valArray(state, [lhs, rhs], [null, lhs.type]); + const [left, right] = valArray(state, [lhs, rhs], [null, lhs.type]); if (rhs.type === 'param') { - let orders = state.namedParameters.map(param => { + const orders = state.namedParameters.map(param => { return param === rhs || param === lhs ? [param, { ...param }] : param; }); state.namedParameters = [].concat.apply([], orders); @@ -696,18 +701,18 @@ const compileOp = saveStack('op', (state, fieldRef, opData) => { return `${left} != ${right}`; } case '$oneof': { - let [left, right] = valArray(state, [lhs, rhs], [null, 'array']); + const [left, right] = valArray(state, [lhs, rhs], [null, 'array']); // Dedupe the ids - let ids = [...new Set(right)]; + const ids = [...new Set(right)]; // eslint-disable-next-line rulesdir/typography return `${left} IN (` + ids.map(id => `'${id}'`).join(',') + ')'; } case '$like': { - let [left, right] = valArray(state, [lhs, rhs], ['string', 'string']); + const [left, right] = valArray(state, [lhs, rhs], ['string', 'string']); return `${left} LIKE ${right}`; } case '$notlike': { - let [left, right] = valArray(state, [lhs, rhs], ['string', 'string']); + const [left, right] = valArray(state, [lhs, rhs], ['string', 'string']); return `(${left} NOT LIKE ${right}\n OR ${left} IS NULL)`; } default: @@ -725,7 +730,7 @@ function compileConditions(state, conds) { } return conds.filter(Boolean).reduce((res, condsObj) => { - let compiled = Object.entries(condsObj) + const compiled = Object.entries(condsObj) .map(([field, cond]) => { // Allow a falsy value in the lhs of $and and $or to allow for // quick forms like `$or: amount != 0 && ...` @@ -768,7 +773,7 @@ function compileOr(state, conds) { if (!conds) { return '0'; } - let res = compileConditions(state, conds); + const res = compileConditions(state, conds); if (res.length === 0) { return '0'; } @@ -780,7 +785,7 @@ function compileAnd(state, conds) { if (!conds) { return '1'; } - let res = compileConditions(state, conds); + const res = compileConditions(state, conds); if (res.length === 0) { return '1'; } @@ -792,14 +797,14 @@ const compileWhere = saveStack('filter', (state, conds) => { }); function compileJoins(state, tableRef, internalTableFilters) { - let joins = []; + const joins = []; state.paths.forEach((desc, path) => { - let { tableName, tableId, joinField, joinTable, noMapping } = + const { 
tableName, tableId, joinField, joinTable, noMapping } = state.paths.get(path); let on = `${tableId}.id = ${tableRef(joinTable)}.${quoteAlias(joinField)}`; - let filters = internalTableFilters(tableName); + const filters = internalTableFilters(tableName); if (filters.length > 0) { on += ' AND ' + @@ -831,12 +836,12 @@ function expandStar(state, expr) { tableId: state.implicitTableId, }; } else if (expr.match(/\.\*$/)) { - let result = popPath(expr); + const result = popPath(expr); path = result.path; pathInfo = resolvePath(state, result.path); } - let table = state.schema[pathInfo.tableName]; + const table = state.schema[pathInfo.tableName]; if (table == null) { throw new Error(`Table “${pathInfo.tableName}” does not exist`); } @@ -852,26 +857,26 @@ const compileSelect = saveStack( exprs = exprs.concat(['id']); } - let select = exprs.map(expr => { + const select = exprs.map(expr => { if (typeof expr === 'string') { if (expr.indexOf('*') !== -1) { - let fields = expandStar(state, expr); + const fields = expandStar(state, expr); return fields .map(field => { - let compiled = compileExpr(state, '$' + field); + const compiled = compileExpr(state, '$' + field); state.outputTypes.set(field, compiled.type); return compiled.value + ' AS ' + quoteAlias(field); }) .join(', '); } - let compiled = compileExpr(state, '$' + expr); + const compiled = compileExpr(state, '$' + expr); state.outputTypes.set(expr, compiled.type); return compiled.value + ' AS ' + quoteAlias(expr); } - let [name, value] = Object.entries(expr)[0]; + const [name, value] = Object.entries(expr)[0]; if (name[0] === '$') { state.compileStack.push({ type: 'value', value: expr }); throw new CompileError( @@ -880,12 +885,12 @@ const compileSelect = saveStack( } if (typeof value === 'string') { - let compiled = compileExpr(state, '$' + value); + const compiled = compileExpr(state, '$' + value); state.outputTypes.set(name, compiled.type); return `${compiled.value} AS ${quoteAlias(name)}`; } - let compiled = compileFunction({ ...state, orders }, value); + const compiled = compileFunction({ ...state, orders }, value); state.outputTypes.set(name, compiled.type); return compiled.value + ` AS ${quoteAlias(name)}`; }); @@ -895,7 +900,7 @@ const compileSelect = saveStack( ); const compileGroupBy = saveStack('groupBy', (state, exprs) => { - let groupBy = exprs.map(expr => { + const groupBy = exprs.map(expr => { if (typeof expr === 'string') { return compileExpr(state, '$' + expr).value; } @@ -907,15 +912,15 @@ const compileGroupBy = saveStack('groupBy', (state, exprs) => { }); const compileOrderBy = saveStack('orderBy', (state, exprs) => { - let orderBy = exprs.map(expr => { + const orderBy = exprs.map(expr => { let compiled; let dir = null; if (typeof expr === 'string') { compiled = compileExpr(state, '$' + expr).value; } else { - let entries = Object.entries(expr); - let entry = entries[0]; + const entries = Object.entries(expr); + const entry = entries[0]; // Check if this is a field reference if (entries.length === 1 && entry[0][0] !== '$') { @@ -923,7 +928,7 @@ const compileOrderBy = saveStack('orderBy', (state, exprs) => { compiled = compileExpr(state, '$' + entry[0]).value; } else { // Otherwise it's a function - let { $dir, ...func } = expr; + const { $dir, ...func } = expr; dir = $dir; compiled = compileFunction(state, func).value; } @@ -941,13 +946,14 @@ const compileOrderBy = saveStack('orderBy', (state, exprs) => { return orderBy.join(', '); }); -let AGGREGATE_FUNCTIONS = ['$sum', '$count']; +const AGGREGATE_FUNCTIONS = ['$sum', 
'$count']; function isAggregateFunction(expr) { if (typeof expr !== 'object' || Array.isArray(expr)) { return false; } - let [name, argExprs] = Object.entries(expr)[0]; + const [name, originalArgExprs] = Object.entries(expr)[0]; + let argExprs = originalArgExprs; if (!Array.isArray(argExprs)) { argExprs = [argExprs]; } @@ -970,7 +976,7 @@ export function isAggregateQuery(queryState) { return !!queryState.selectExpressions.find(expr => { if (typeof expr !== 'string') { - let [_, value] = Object.entries(expr)[0]; + const [_, value] = Object.entries(expr)[0]; return isAggregateFunction(value); } return false; @@ -987,18 +993,18 @@ export function compileQuery( schema, schemaConfig: SchemaConfig = {}, ) { - let { withDead, validateRefs = true, tableOptions, rawMode } = queryState; + const { withDead, validateRefs = true, tableOptions, rawMode } = queryState; - let { + const { tableViews = {}, tableFilters = name => [], customizeQuery = queryState => queryState, } = schemaConfig; - let internalTableFilters = name => { - let filters = tableFilters(name); + const internalTableFilters = name => { + const filters = tableFilters(name); // These filters cannot join tables and must be simple strings - for (let filter of filters) { + for (const filter of filters) { if (Array.isArray(filter)) { throw new CompileError( 'Invalid internal table filter: only object filters are supported', @@ -1013,17 +1019,17 @@ export function compileQuery( return filters; }; - let tableRef = (name: string, isJoin?: boolean) => { - let view = + const tableRef = (name: string, isJoin?: boolean) => { + const view = typeof tableViews === 'function' ? tableViews(name, { withDead, isJoin, tableOptions }) : tableViews[name]; return view || name; }; - let tableName = queryState.table; + const tableName = queryState.table; - let { + const { filterExpressions, selectExpressions, groupExpressions, @@ -1037,7 +1043,7 @@ export function compileQuery( let joins = ''; let groupBy = ''; let orderBy = ''; - let state = { + const state = { schema, implicitTableName: tableName, implicitTableId: tableRef(tableName), @@ -1060,27 +1066,27 @@ export function compileQuery( ); if (filterExpressions.length > 0) { - let result = compileWhere(state, filterExpressions); + const result = compileWhere(state, filterExpressions); where = 'WHERE ' + result; } else { where = 'WHERE 1'; } if (!rawMode) { - let filters = internalTableFilters(tableName); + const filters = internalTableFilters(tableName); if (filters.length > 0) { where += ' AND ' + compileAnd(state, filters); } } if (groupExpressions.length > 0) { - let result = compileGroupBy(state, groupExpressions); + const result = compileGroupBy(state, groupExpressions); groupBy = 'GROUP BY ' + result; } // Orders don't matter if doing a single calculation if (orderExpressions.length > 0) { - let result = compileOrderBy(state, orderExpressions); + const result = compileOrderBy(state, orderExpressions); orderBy = 'ORDER BY ' + result; } @@ -1095,7 +1101,7 @@ export function compileQuery( throw e; } - let sqlPieces = { + const sqlPieces = { select, from: tableRef(tableName), joins, @@ -1113,9 +1119,9 @@ export function compileQuery( } export function defaultConstructQuery(queryState, state, sqlPieces) { - let s = sqlPieces; + const s = sqlPieces; - let where = queryState.withDead + const where = queryState.withDead ? 
s.where : addTombstone( state.schema, @@ -1140,6 +1146,6 @@ export function generateSQLWithState( schema?: unknown, schemaConfig?: unknown, ) { - let { sqlPieces, state } = compileQuery(queryState, schema, schemaConfig); + const { sqlPieces, state } = compileQuery(queryState, schema, schemaConfig); return { sql: defaultConstructQuery(queryState, state, sqlPieces), state }; } diff --git a/packages/loot-core/src/server/aql/exec.test.ts b/packages/loot-core/src/server/aql/exec.test.ts index f4fc8cf8d59..309f5d8af14 100644 --- a/packages/loot-core/src/server/aql/exec.test.ts +++ b/packages/loot-core/src/server/aql/exec.test.ts @@ -23,28 +23,28 @@ function runQuery(query, options?: unknown) { async function insertTransactions(repeatTimes = 1) { let transactions = []; - let group = await db.insertCategoryGroup({ name: 'group' }); + const group = await db.insertCategoryGroup({ name: 'group' }); for (let i = 0; i < repeatTimes; i++) { - let cat1 = await db.insertCategory({ + const cat1 = await db.insertCategory({ id: 'cat' + i + 'a', name: 'cat' + i + 'a', cat_group: group, }); - let cat2 = await db.insertCategory({ + const cat2 = await db.insertCategory({ id: 'cat' + i + 'b', name: 'cat' + i + 'b', cat_group: group, }); - let parent = { + const parent = { id: uuidv4(), account: 'acct', date: '2020-01-04', amount: -100, is_parent: true, }; - let parent2 = { + const parent2 = { id: uuidv4(), account: 'acct', date: '2020-01-01', @@ -65,7 +65,7 @@ async function insertTransactions(repeatTimes = 1) { ]); } - for (let trans of transactions) { + for (const trans of transactions) { await db.insertTransaction(trans); } } @@ -116,7 +116,7 @@ describe('runQuery', () => { }); it('provides named parameters and converts types', async () => { - let transId = uuidv4(); + const transId = uuidv4(); await db.insertTransaction({ id: transId, account: 'acct', @@ -171,20 +171,20 @@ describe('runQuery', () => { it('allows null as a parameter', async () => { await db.insertCategoryGroup({ id: 'group', name: 'group' }); await db.insertCategory({ id: 'cat', name: 'cat', cat_group: 'group' }); - let transNoCat = await db.insertTransaction({ + const transNoCat = await db.insertTransaction({ account: 'acct', date: '2020-01-01', amount: -5001, category: null, }); - let transCat = await db.insertTransaction({ + const transCat = await db.insertTransaction({ account: 'acct', date: '2020-01-01', amount: -5001, category: 'cat', }); - let queryState = query('transactions') + const queryState = query('transactions') .filter({ category: ':category' }) .select() .serialize(); @@ -197,7 +197,7 @@ describe('runQuery', () => { }); it('parameters have the correct order', async () => { - let transId = uuidv4(); + const transId = uuidv4(); await db.insertTransaction({ id: transId, account: 'acct', @@ -205,7 +205,7 @@ describe('runQuery', () => { amount: -5001, }); - let { data } = await runQuery( + const { data } = await runQuery( query('transactions') .filter({ amount: { $lt: { $neg: ':amount' } }, @@ -221,11 +221,11 @@ describe('runQuery', () => { it('fetches all data required for $oneof', async () => { await insertTransactions(); - let rows = await db.all('SELECT id FROM transactions WHERE amount < -50'); - let ids = rows.slice(0, 3).map(row => row.id); + const rows = await db.all('SELECT id FROM transactions WHERE amount < -50'); + const ids = rows.slice(0, 3).map(row => row.id); ids.sort(); - let { data } = await runQuery( + const { data } = await runQuery( query('transactions') .filter({ id: { $oneof: repeat(ids, 1000) }, amount: { 
$lt: 50 } }) .select('id') diff --git a/packages/loot-core/src/server/aql/exec.ts b/packages/loot-core/src/server/aql/exec.ts index afc81fd43aa..9f0c4aa36ce 100644 --- a/packages/loot-core/src/server/aql/exec.ts +++ b/packages/loot-core/src/server/aql/exec.ts @@ -17,7 +17,7 @@ import { convertInputType, convertOutputType } from './schema-helpers'; function applyTypes(data, outputTypes) { for (let i = 0; i < data.length; i++) { - let item = data[i]; + const item = data[i]; Object.keys(item).forEach(name => { item[name] = convertOutputType(item[name], outputTypes.get(name)); }); @@ -31,8 +31,8 @@ export async function execQuery( params, outputTypes, ) { - let sql = defaultConstructQuery(queryState, state, sqlPieces); - let data = await db.all(sql, params); + const sql = defaultConstructQuery(queryState, state, sqlPieces); + const data = await db.all(sql, params); applyTypes(data, outputTypes); return data; } @@ -43,8 +43,8 @@ export async function runCompiledQuery( state, { params = {}, executors = {} } = {}, ) { - let paramArray = state.namedParameters.map(param => { - let name = param.paramName; + const paramArray = state.namedParameters.map(param => { + const name = param.paramName; if (params[name] === undefined) { throw new Error(`Parameter ${name} not provided to query`); } @@ -66,8 +66,8 @@ export async function runCompiledQuery( if (query.calculation) { if (data.length > 0) { - let row = data[0]; - let k = Object.keys(row)[0]; + const row = data[0]; + const k = Object.keys(row)[0]; // TODO: the function being run should be the one to // determine the default value, not hardcoded as 0 data = row[k] || 0; @@ -79,7 +79,7 @@ export async function runCompiledQuery( } export async function runQuery(schema, schemaConfig, query, options) { - let { sqlPieces, state } = compileQuery(query, schema, schemaConfig); - let data = await runCompiledQuery(query, sqlPieces, state, options); + const { sqlPieces, state } = compileQuery(query, schema, schemaConfig); + const data = await runCompiledQuery(query, sqlPieces, state, options); return { data, dependencies: state.dependencies }; } diff --git a/packages/loot-core/src/server/aql/schema-helpers.test.ts b/packages/loot-core/src/server/aql/schema-helpers.test.ts index 8be75b9f277..40239930e3a 100644 --- a/packages/loot-core/src/server/aql/schema-helpers.test.ts +++ b/packages/loot-core/src/server/aql/schema-helpers.test.ts @@ -5,7 +5,7 @@ import { convertFromSelect, } from './schema-helpers'; -let basicSchema = { +const basicSchema = { transactions: { id: { type: 'id' }, date: { type: 'date', required: true }, @@ -19,7 +19,7 @@ let basicSchema = { describe('schema-helpers', () => { test('select converts field types', () => { - let trans = convertFromSelect(basicSchema, {}, 'transactions', { + const trans = convertFromSelect(basicSchema, {}, 'transactions', { amount: 5, cleared: 0, date: 20200101, @@ -32,7 +32,7 @@ describe('schema-helpers', () => { }); test('a basic insert works', () => { - let trans = convertForInsert(basicSchema, {}, 'transactions', { + const trans = convertForInsert(basicSchema, {}, 'transactions', { id: 't1', account: 'foo', amount: 5, @@ -49,7 +49,7 @@ describe('schema-helpers', () => { }); test('a basic update works', () => { - let trans = convertForUpdate(basicSchema, {}, 'transactions', { + const trans = convertForUpdate(basicSchema, {}, 'transactions', { id: 'foo', amount: 5001, }); @@ -120,7 +120,7 @@ describe('schema-helpers', () => { }); test('conform converts types to db representations', () => { - let obj = 
conform(basicSchema, {}, 'transactions', { + const obj = conform(basicSchema, {}, 'transactions', { date: '2020-01-01', cleared: false, }); @@ -129,7 +129,7 @@ describe('schema-helpers', () => { }); test('conform renames fields', () => { - let obj = conform( + const obj = conform( basicSchema, { views: { diff --git a/packages/loot-core/src/server/aql/schema-helpers.ts b/packages/loot-core/src/server/aql/schema-helpers.ts index 33dd90f7edc..6ad2c96a53c 100644 --- a/packages/loot-core/src/server/aql/schema-helpers.ts +++ b/packages/loot-core/src/server/aql/schema-helpers.ts @@ -91,15 +91,15 @@ export function conform( obj, { skipNull = false } = {}, ) { - let tableSchema = schema[table]; + const tableSchema = schema[table]; if (tableSchema == null) { throw new Error(`Table “${table}” does not exist`); } - let views = schemaConfig.views || {}; + const views = schemaConfig.views || {}; // Rename fields if necessary - let fieldRef = field => { + const fieldRef = field => { if (views[table] && views[table].fields) { return views[table].fields[field] || field; } @@ -114,7 +114,7 @@ export function conform( return null; } - let fieldDesc = tableSchema[field]; + const fieldDesc = tableSchema[field]; if (fieldDesc == null) { throw new Error( `Field “${field}” does not exist on table ${table}: ${JSON.stringify( @@ -143,9 +143,9 @@ export function conform( } export function convertForInsert(schema, schemaConfig, table, rawObj) { - let obj = { ...rawObj }; + const obj = { ...rawObj }; - let tableSchema = schema[table]; + const tableSchema = schema[table]; if (tableSchema == null) { throw new Error(`Error inserting: table “${table}” does not exist`); } @@ -153,7 +153,7 @@ export function convertForInsert(schema, schemaConfig, table, rawObj) { // Inserting checks all the fields in the table and adds any default // values necessary Object.keys(tableSchema).forEach(field => { - let fieldDesc = tableSchema[field]; + const fieldDesc = tableSchema[field]; if (obj[field] == null) { if (fieldDesc.default !== undefined) { @@ -179,9 +179,9 @@ export function convertForInsert(schema, schemaConfig, table, rawObj) { } export function convertForUpdate(schema, schemaConfig, table, rawObj) { - let obj = { ...rawObj }; + const obj = { ...rawObj }; - let tableSchema = schema[table]; + const tableSchema = schema[table]; if (tableSchema == null) { throw new Error(`Error updating: table “${table}” does not exist`); } @@ -190,16 +190,16 @@ export function convertForUpdate(schema, schemaConfig, table, rawObj) { } export function convertFromSelect(schema, schemaConfig, table, obj) { - let tableSchema = schema[table]; + const tableSchema = schema[table]; if (tableSchema == null) { throw new Error(`Table “${table}” does not exist`); } - let fields = Object.keys(tableSchema); - let result = {}; + const fields = Object.keys(tableSchema); + const result = {}; for (let i = 0; i < fields.length; i++) { - let fieldName = fields[i]; - let fieldDesc = tableSchema[fieldName]; + const fieldName = fields[i]; + const fieldDesc = tableSchema[fieldName]; result[fieldName] = convertOutputType(obj[fieldName], fieldDesc.type); } diff --git a/packages/loot-core/src/server/aql/schema/executors.test.ts b/packages/loot-core/src/server/aql/schema/executors.test.ts index 720fc932a02..3e588144f54 100644 --- a/packages/loot-core/src/server/aql/schema/executors.test.ts +++ b/packages/loot-core/src/server/aql/schema/executors.test.ts @@ -22,20 +22,20 @@ function repeat(arr, times) { function isAlive(trans, allById) { if (trans.parent_id) { - let parent = 
allById[trans.parent_id]; + const parent = allById[trans.parent_id]; return !trans.tombstone && parent && !parent.tombstone; } return !trans.tombstone; } function aliveTransactions(arr) { - let all = groupById(arr); + const all = groupById(arr); return arr.filter(t => isAlive(t, all)); } async function insertTransactions(transactions, payeeIds?: string[]) { return batchMessages(async () => { - for (let trans of transactions) { + for (const trans of transactions) { db.insertTransaction(trans); } @@ -61,19 +61,19 @@ function expectTransactionOrder( 'id', ]; - let sorted = [...data].sort((i1, i2) => { + const sorted = [...data].sort((i1, i2) => { for (let field of expectedFields) { let order = 'asc'; if (!(typeof field === 'string')) { - let entries = Object.entries(field)[0]; + const entries = Object.entries(field)[0]; field = entries[0]; order = entries[1]; } - let f1 = i1[field]; - let f2 = i2[field]; - let before = order === 'asc' ? -1 : 1; - let after = order === 'asc' ? 1 : -1; + const f1 = i1[field]; + const f2 = i2[field]; + const before = order === 'asc' ? -1 : 1; + const after = order === 'asc' ? 1 : -1; expect(f1).not.toBeUndefined(); expect(f2).not.toBeUndefined(); @@ -95,7 +95,7 @@ function expectTransactionOrder( } async function expectPagedData(query, numTransactions, allData) { - let pageCount = Math.max(Math.floor(numTransactions / 3), 3); + const pageCount = Math.max(Math.floor(numTransactions / 3), 3); let pagedData = []; let done = false; @@ -106,7 +106,7 @@ async function expectPagedData(query, numTransactions, allData) { expect(i).toBeLessThanOrEqual(100); // Pull in all the data via pages - let { data } = await runQuery( + const { data } = await runQuery( query.limit(pageCount).offset(pagedData.length).serialize(), ); @@ -138,7 +138,7 @@ describe('transaction executors', () => { async arr => { await insertTransactions(arr); - let { data } = await runQuery( + const { data } = await runQuery( query('transactions') .filter({ amount: { $lt: 0 } }) .select('*') @@ -149,7 +149,7 @@ describe('transaction executors', () => { expect(data.filter(t => t.is_parent).length).toBe(0); expect(data.filter(t => t.tombstone).length).toBe(0); - let { data: defaultData } = await runQuery( + const { data: defaultData } = await runQuery( query('transactions') .filter({ amount: { $lt: 0 } }) .select('*') @@ -175,7 +175,7 @@ describe('transaction executors', () => { async arr => { await insertTransactions(arr); - let { data } = await runQuery( + const { data } = await runQuery( query('transactions') .filter({ amount: { $lt: 0 } }) .select('*') @@ -191,7 +191,7 @@ describe('transaction executors', () => { }); it('aggregate queries work with `splits: grouped`', async () => { - let payeeIds = ['payee1', 'payee2', 'payee3', 'payee4', 'payee5']; + const payeeIds = ['payee1', 'payee2', 'payee3', 'payee4', 'payee5']; await fc.assert( fc @@ -200,7 +200,7 @@ describe('transaction executors', () => { async arr => { await insertTransactions(arr, payeeIds); - let aggQuery = query('transactions') + const aggQuery = query('transactions') .filter({ $or: [{ amount: { $lt: -5 } }, { amount: { $gt: -2 } }], 'payee.name': { $gt: '' }, @@ -208,11 +208,12 @@ describe('transaction executors', () => { .options({ splits: 'grouped' }) .calculate({ $sum: '$amount' }); - let { data } = await runQuery(aggQuery.serialize()); + const { data } = await runQuery(aggQuery.serialize()); - let sum = aliveTransactions(arr).reduce((sum, trans) => { - let amount = trans.amount || 0; - let matched = (amount < -5 || amount > -2) && 
trans.payee != null; + const sum = aliveTransactions(arr).reduce((sum, trans) => { + const amount = trans.amount || 0; + const matched = + (amount < -5 || amount > -2) && trans.payee != null; if (!trans.tombstone && !trans.is_parent && matched) { return sum + amount; } @@ -235,40 +236,40 @@ describe('transaction executors', () => { }); function runTest(makeQuery) { - let payeeIds = ['payee1', 'payee2', 'payee3', 'payee4', 'payee5']; + const payeeIds = ['payee1', 'payee2', 'payee3', 'payee4', 'payee5']; async function check(arr) { - let orderFields = ['payee.name', 'amount', 'id']; + const orderFields = ['payee.name', 'amount', 'id']; // Insert transactions and get a list of all the alive // ones to make it easier to check the data later (don't // have to always be filtering out dead ones) await insertTransactions(arr, payeeIds); - let allTransactions = aliveTransactions(arr); + const allTransactions = aliveTransactions(arr); // Query time - let { query, expectedIds, expectedMatchedIds } = makeQuery(arr); + const { query, expectedIds, expectedMatchedIds } = makeQuery(arr); // First to a query without order to make sure the default // order works - let { data: defaultOrderData } = await runQuery(query.serialize()); + const { data: defaultOrderData } = await runQuery(query.serialize()); expectTransactionOrder(defaultOrderData); expect(new Set(defaultOrderData.map(t => t.id))).toEqual(expectedIds); // Now do the full test, and add a custom order to make // sure that doesn't effect anything - let orderedQuery = query.orderBy(orderFields); - let { data } = await runQuery(orderedQuery.serialize()); + const orderedQuery = query.orderBy(orderFields); + const { data } = await runQuery(orderedQuery.serialize()); expect(new Set(data.map(t => t.id))).toEqual(expectedIds); // Validate paging and ordering await expectPagedData(orderedQuery, arr.length, data); expectTransactionOrder(data, orderFields); - let matchedIds = new Set(); + const matchedIds = new Set(); // Check that all the subtransactions were returned - for (let trans of data) { + for (const trans of data) { expect(trans.tombstone).toBe(false); if (expectedMatchedIds) { @@ -338,14 +339,14 @@ describe('transaction executors', () => { it('queries the correct transactions without filters', async () => { return runTest(arr => { - let expectedIds = new Set( + const expectedIds = new Set( arr.filter(t => !t.tombstone && !t.is_child).map(t => t.id), ); // Even though we're applying some filters, these are always // guaranteed to return the full split transaction so they // should take the optimized path - let happyQuery = query('transactions') + const happyQuery = query('transactions') .filter({ date: { $gt: '2017-01-01' }, }) @@ -364,21 +365,21 @@ describe('transaction executors', () => { it(`queries the correct transactions with a filter`, async () => { return runTest(arr => { - let expectedIds = new Set(); + const expectedIds = new Set(); // let parents = toGroup( // arr.filter(t => t.is_parent), // new Map(Object.entries(groupById(arr.filter(t => t.parent_id)))) // ); - let parents = groupById(arr.filter(t => t.is_parent && !t.tombstone)); - let matched = new Set(); + const parents = groupById(arr.filter(t => t.is_parent && !t.tombstone)); + const matched = new Set(); // Pick out some ids to query let ids = arr.reduce((ids, trans, idx) => { if (idx % 2 === 0) { - let amount = trans.amount == null ? 0 : trans.amount; - let matches = (amount < -2 || amount > -1) && trans.payee > ''; + const amount = trans.amount == null ? 
0 : trans.amount; + const matches = (amount < -2 || amount > -1) && trans.payee > ''; if (matches && isAlive(trans, parents)) { expectedIds.add(trans.parent_id || trans.id); @@ -394,7 +395,7 @@ describe('transaction executors', () => { // Because why not? It should deduplicate them ids = repeat(ids, 100); - let unhappyQuery = query('transactions') + const unhappyQuery = query('transactions') .filter({ id: [{ $oneof: ids }], payee: { $gt: '' }, diff --git a/packages/loot-core/src/server/aql/schema/executors.ts b/packages/loot-core/src/server/aql/schema/executors.ts index f6fa0d17d5a..037d84e192e 100644 --- a/packages/loot-core/src/server/aql/schema/executors.ts +++ b/packages/loot-core/src/server/aql/schema/executors.ts @@ -8,7 +8,7 @@ import { convertOutputType } from '../schema-helpers'; function toGroup(parents, children, mapper = x => x) { return parents.reduce((list, parent) => { - let childs = children.get(parent.id) || []; + const childs = children.get(parent.id) || []; list.push({ ...mapper(parent), subtransactions: childs.map(mapper), @@ -27,8 +27,8 @@ function toGroup(parents, children, mapper = x => x) { // (or non-split) transactions function execTransactions(state, query, sql, params, outputTypes) { - let tableOptions = query.tableOptions || {}; - let splitType = tableOptions.splits || 'inline'; + const tableOptions = query.tableOptions || {}; + const splitType = tableOptions.splits || 'inline'; if (['all', 'inline', 'none', 'grouped'].indexOf(splitType) === -1) { throw new Error(`Invalid “splits” option for transactions: “${splitType}”`); @@ -58,7 +58,7 @@ function execTransactions(state, query, sql, params, outputTypes) { function _isUnhappy(filter) { // These fields can be filtered - all split transactions will // still be returned regardless - for (let key of Object.keys(filter)) { + for (const key of Object.keys(filter)) { if (key === '$or' || key === '$and') { if (filter[key] && _isUnhappy(filter[key])) { return true; @@ -82,8 +82,8 @@ async function execTransactionsGrouped( splitType, outputTypes, ) { - let { withDead } = queryState; - let whereDead = withDead ? '' : `AND ${sql.from}.tombstone = 0`; + const { withDead } = queryState; + const whereDead = withDead ? '' : `AND ${sql.from}.tombstone = 0`; // Aggregate queries don't make sense for a grouped transactions // query. We never should include both parent and children @@ -91,7 +91,7 @@ async function execTransactionsGrouped( // would never make sense. In this case, switch back to the "inline" // type where only non-parent transactions are considered if (isAggregateQuery(queryState)) { - let s = { ...sql }; + const s = { ...sql }; // Modify the where to only include non-parents s.where = `${s.where} AND ${s.from}.is_parent = 0`; @@ -115,7 +115,7 @@ async function execTransactionsGrouped( if (isHappyPathQuery(queryState)) { // This is just an optimization - we can just filter out children // directly and only list parents - let rowSql = ` + const rowSql = ` SELECT ${sql.from}.id as group_id FROM ${sql.from} ${sql.joins} @@ -129,7 +129,7 @@ async function execTransactionsGrouped( // TODO: phew, what a doozy. 
write docs why it works this way // // prettier-ignore - let rowSql = ` + const rowSql = ` SELECT group_id, matched FROM ( SELECT group_id, @@ -159,29 +159,29 @@ async function execTransactionsGrouped( ); } - let where = whereIn( + const where = whereIn( rows.map(row => row.group_id), `IFNULL(${sql.from}.parent_id, ${sql.from}.id)`, ); - let finalSql = ` + const finalSql = ` SELECT ${sql.select}, parent_id AS _parent_id FROM ${sql.from} ${sql.joins} WHERE ${where} ${whereDead} ${sql.orderBy} `; - let allRows = await db.all(finalSql); + const allRows = await db.all(finalSql); // Group the parents and children up - let { parents, children } = allRows.reduce( + const { parents, children } = allRows.reduce( (acc, trans) => { - let pid = trans._parent_id; + const pid = trans._parent_id; delete trans._parent_id; if (pid == null) { acc.parents.push(trans); } else { - let arr = acc.children.get(pid) || []; + const arr = acc.children.get(pid) || []; arr.push(trans); acc.children.set(pid, arr); } @@ -190,7 +190,7 @@ async function execTransactionsGrouped( { parents: [], children: new Map() }, ); - let mapper = trans => { + const mapper = trans => { Object.keys(trans).forEach(name => { trans[name] = convertOutputType(trans[name], outputTypes.get(name)); }); @@ -212,7 +212,7 @@ async function execTransactionsBasic( splitType, outputTypes, ) { - let s = { ...sql }; + const s = { ...sql }; if (splitType !== 'all') { if (splitType === 'none') { diff --git a/packages/loot-core/src/server/aql/schema/index.ts b/packages/loot-core/src/server/aql/schema/index.ts index 873e3594f32..26af5205b07 100644 --- a/packages/loot-core/src/server/aql/schema/index.ts +++ b/packages/loot-core/src/server/aql/schema/index.ts @@ -153,7 +153,7 @@ export const schemaConfig = { return 'v_transactions_internal_alive'; } - let splitType = tableOptions.splits || 'inline'; + const splitType = tableOptions.splits || 'inline'; // Use the view to exclude dead transactions if using `inline` or `none` if (!withDead && (splitType === 'inline' || splitType === 'none')) { return 'v_transactions_internal_alive'; @@ -179,7 +179,7 @@ export const schemaConfig = { }, customizeQuery(queryState) { - let { table: tableName } = queryState; + const { table: tableName } = queryState; function orderBy(orders) { // If order was specified, always add id as the last sort to make @@ -221,7 +221,7 @@ export const schemaConfig = { views: { payees: { v_payees: internalFields => { - let fields = internalFields({ + const fields = internalFields({ name: 'COALESCE(__accounts.name, _.name)', }); @@ -241,7 +241,7 @@ export const schemaConfig = { }, v_categories: internalFields => { - let fields = internalFields({ group: 'cat_group' }); + const fields = internalFields({ group: 'cat_group' }); return `SELECT ${fields} FROM categories _`; }, }, @@ -249,7 +249,7 @@ export const schemaConfig = { schedules: { v_schedules: internalFields => { /* eslint-disable rulesdir/typography */ - let fields = internalFields({ + const fields = internalFields({ next_date: ` CASE WHEN _nd.local_next_date_ts = _nd.base_next_date_ts THEN _nd.local_next_date @@ -289,7 +289,7 @@ export const schemaConfig = { v_transactions_internal: internalFields => { // Override some fields to make custom stuff - let fields = internalFields({ + const fields = internalFields({ payee: 'pm.targetId', category: `CASE WHEN _.isParent = 1 THEN NULL ELSE cm.transferId END`, amount: `IFNULL(_.amount, 0)`, @@ -317,7 +317,7 @@ export const schemaConfig = { `, v_transactions: (_, publicFields) => { - let fields = 
publicFields({ + const fields = publicFields({ payee: 'p.id', category: 'c.id', account: 'a.id', diff --git a/packages/loot-core/src/server/aql/views.test.ts b/packages/loot-core/src/server/aql/views.test.ts index 045607ba0f6..e87760fcb70 100644 --- a/packages/loot-core/src/server/aql/views.test.ts +++ b/packages/loot-core/src/server/aql/views.test.ts @@ -20,7 +20,7 @@ const schemaConfig = { }, v_transactions1: internalFields => { - let fields = internalFields({ + const fields = internalFields({ transfer_id: 'CASE WHEN amount < 4 THEN null ELSE transfer_id END', }); @@ -28,7 +28,7 @@ const schemaConfig = { }, v_transactions2: (_, publicFields) => { - let fields = publicFields({ + const fields = publicFields({ // eslint-disable-next-line rulesdir/typography transfer_id: 'COERCE(transfer_id, "foo")', }); @@ -41,7 +41,7 @@ const schemaConfig = { describe('schema views', () => { test('generates views with all the right fields', () => { - let str = makeViews(schema, schemaConfig); + const str = makeViews(schema, schemaConfig); expect(str).toMatch('DROP VIEW IF EXISTS v_transactions1;'); expect(str).toMatch( 'CREATE VIEW v_transactions1 AS SELECT _.id, _.a_mo_unt AS amount, CASE WHEN amount < 4 THEN null ELSE transfer_id END AS transfer_id FROM transactions;', diff --git a/packages/loot-core/src/server/aql/views.ts b/packages/loot-core/src/server/aql/views.ts index 31f21269c1c..d47c25c24f4 100644 --- a/packages/loot-core/src/server/aql/views.ts +++ b/packages/loot-core/src/server/aql/views.ts @@ -4,7 +4,7 @@ function selectFields(fields) { return Object.keys(fields) .map(as => { let field = fields[as]; - let needsAs = field !== as; + const needsAs = field !== as; // If it's just an identifier, we automatically prefix it with // `_.` which makes sure it references the root table if (!field.match(/[ .]/)) { @@ -16,24 +16,24 @@ function selectFields(fields) { } export function makeViews(schema, schemaConfig) { - let views = schemaConfig.views; - let viewStrs = []; + const views = schemaConfig.views; + const viewStrs = []; Object.keys(views).forEach(table => { - let { fields: fieldMappings = {}, ...tableViews } = views[table]; + const { fields: fieldMappings = {}, ...tableViews } = views[table]; - let publicFields = Object.fromEntries( + const publicFields = Object.fromEntries( Object.keys(schema[table]).map(name => [name, name]), ); - let internalFields = { ...publicFields, ...fieldMappings }; + const internalFields = { ...publicFields, ...fieldMappings }; Object.keys(tableViews).forEach(viewName => { - let publicMaker = overrides => { - let fields = { ...publicFields, ...overrides }; + const publicMaker = overrides => { + const fields = { ...publicFields, ...overrides }; return selectFields(fields); }; - let internalMaker = overrides => { - let fields = { ...internalFields, ...overrides }; + const internalMaker = overrides => { + const fields = { ...internalFields, ...overrides }; return selectFields(fields); }; diff --git a/packages/loot-core/src/server/backups.ts b/packages/loot-core/src/server/backups.ts index 713e2b3fe5a..509ae831e09 100644 --- a/packages/loot-core/src/server/backups.ts +++ b/packages/loot-core/src/server/backups.ts @@ -63,9 +63,9 @@ async function getLatestBackup(id: string): Promise { } export async function getAvailableBackups(id: string): Promise { - let backups = await getBackups(id); + const backups = await getBackups(id); - let latestBackup = await getLatestBackup(id); + const latestBackup = await getLatestBackup(id); if (latestBackup) { backups.unshift(latestBackup); } 
@@ -85,13 +85,13 @@ export async function updateBackups(backups) { }, {}); const removed = []; - for (let day of Object.keys(byDay)) { + for (const day of Object.keys(byDay)) { const dayBackups = byDay[day]; const isToday = day === monthUtils.currentDay(); // Allow 3 backups of the current day (so fine-grained edits are // kept around). Otherwise only keep around one backup per day. // And only keep a total of 10 backups. - for (let backup of dayBackups.slice(isToday ? 3 : 1)) { + for (const backup of dayBackups.slice(isToday ? 3 : 1)) { removed.push(backup.id); } } @@ -111,8 +111,8 @@ export async function makeBackup(id: string) { await fs.removeFile(fs.join(fs.getBudgetDir(id), LATEST_BACKUP_FILENAME)); } - let backupId = `${uuidv4()}.sqlite`; - let backupPath = fs.join(budgetDir, 'backups', backupId); + const backupId = `${uuidv4()}.sqlite`; + const backupPath = fs.join(budgetDir, 'backups', backupId); if (!(await fs.exists(fs.join(budgetDir, 'backups')))) { await fs.mkdir(fs.join(budgetDir, 'backups')); @@ -127,7 +127,7 @@ export async function makeBackup(id: string) { sqlite.closeDatabase(db); const toRemove = await updateBackups(await getBackups(id)); - for (let id of toRemove) { + for (const id of toRemove) { await fs.removeFile(fs.join(budgetDir, 'backups', id)); } diff --git a/packages/loot-core/src/server/budget/actions.ts b/packages/loot-core/src/server/budget/actions.ts index 4258f129966..15c481ab5c0 100644 --- a/packages/loot-core/src/server/budget/actions.ts +++ b/packages/loot-core/src/server/budget/actions.ts @@ -27,12 +27,12 @@ function calcBufferedAmount( } function getBudgetTable(): string { - let { budgetType } = prefs.getPrefs() || {}; + const { budgetType } = prefs.getPrefs() || {}; return budgetType === 'report' ? 'reflect_budgets' : 'zero_budgets'; } export function isReflectBudget(): boolean { - let { budgetType } = prefs.getPrefs(); + const { budgetType } = prefs.getPrefs(); return budgetType === 'report'; } @@ -59,9 +59,9 @@ function getBudgetData(table: string, month: string): Promise { } function getAllMonths(startMonth: string): string[] { - let { createdMonths } = sheet.get().meta(); + const { createdMonths } = sheet.get().meta(); let latest = null; - for (let month of createdMonths) { + for (const month of createdMonths) { if (latest == null || month > latest) { latest = month; } @@ -78,8 +78,8 @@ export function getBudget({ category: string; month: string; }): number { - let table = getBudgetTable(); - let existing = db.firstSync( + const table = getBudgetTable(); + const existing = db.firstSync( `SELECT * FROM ${table} WHERE month = ? AND category = ?`, [dbMonth(month), category], ); @@ -98,7 +98,7 @@ export function setBudget({ amount = safeNumber(typeof amount === 'number' ? amount : 0); const table = getBudgetTable(); - let existing = db.firstSync( + const existing = db.firstSync( `SELECT id FROM ${table} WHERE month = ? AND category = ?`, [dbMonth(month), category], ); @@ -115,7 +115,7 @@ export function setBudget({ export function setGoal({ month, category, goal }): Promise { const table = getBudgetTable(); - let existing = db.firstSync( + const existing = db.firstSync( `SELECT id FROM ${table} WHERE month = ? 
AND category = ?`, [dbMonth(month), category], ); @@ -132,7 +132,7 @@ export function setGoal({ month, category, goal }): Promise { } export function setBuffer(month: string, amount: unknown): Promise { - let existing = db.firstSync( + const existing = db.firstSync( `SELECT id FROM zero_budget_months WHERE id = ?`, [month], ); @@ -151,7 +151,7 @@ function setCarryover( month: string, flag: boolean, ): Promise { - let existing = db.firstSync( + const existing = db.firstSync( `SELECT id FROM ${table} WHERE month = ? AND category = ?`, [month, category], ); @@ -173,9 +173,9 @@ export async function copyPreviousMonth({ }: { month: string; }): Promise { - let prevMonth = dbMonth(monthUtils.prevMonth(month)); - let table = getBudgetTable(); - let budgetData = await getBudgetData(table, prevMonth.toString()); + const prevMonth = dbMonth(monthUtils.prevMonth(month)); + const table = getBudgetTable(); + const budgetData = await getBudgetData(table, prevMonth.toString()); await batchMessages(async () => { budgetData.forEach(prevBudget => { @@ -198,8 +198,8 @@ export async function copySinglePreviousMonth({ month: string; category: string; }): Promise { - let prevMonth = monthUtils.prevMonth(month); - let newAmount = await getSheetValue( + const prevMonth = monthUtils.prevMonth(month); + const newAmount = await getSheetValue( monthUtils.sheetForMonth(prevMonth), 'budget-' + category, ); @@ -209,7 +209,7 @@ export async function copySinglePreviousMonth({ } export async function setZero({ month }: { month: string }): Promise { - let categories = await db.all( + const categories = await db.all( 'SELECT * FROM v_categories WHERE tombstone = 0', ); @@ -228,29 +228,29 @@ export async function set3MonthAvg({ }: { month: string; }): Promise { - let categories = await db.all( + const categories = await db.all( 'SELECT * FROM v_categories WHERE tombstone = 0', ); - let prevMonth1 = monthUtils.prevMonth(month); - let prevMonth2 = monthUtils.prevMonth(prevMonth1); - let prevMonth3 = monthUtils.prevMonth(prevMonth2); + const prevMonth1 = monthUtils.prevMonth(month); + const prevMonth2 = monthUtils.prevMonth(prevMonth1); + const prevMonth3 = monthUtils.prevMonth(prevMonth2); await batchMessages(async () => { - for (let cat of categories) { + for (const cat of categories) { if (cat.is_income === 1 && !isReflectBudget()) { continue; } - let spent1 = await getSheetValue( + const spent1 = await getSheetValue( monthUtils.sheetForMonth(prevMonth1), 'sum-amount-' + cat.id, ); - let spent2 = await getSheetValue( + const spent2 = await getSheetValue( monthUtils.sheetForMonth(prevMonth2), 'sum-amount-' + cat.id, ); - let spent3 = await getSheetValue( + const spent3 = await getSheetValue( monthUtils.sheetForMonth(prevMonth3), 'sum-amount-' + cat.id, ); @@ -292,16 +292,16 @@ export async function holdForNextMonth({ month: string; amount: number; }): Promise { - let row = await db.first( + const row = await db.first( 'SELECT buffered FROM zero_budget_months WHERE id = ?', [month], ); - let sheetName = monthUtils.sheetForMonth(month); - let toBudget = await getSheetValue(sheetName, 'to-budget'); + const sheetName = monthUtils.sheetForMonth(month); + const toBudget = await getSheetValue(sheetName, 'to-budget'); if (toBudget > 0) { - let bufferedAmount = calcBufferedAmount( + const bufferedAmount = calcBufferedAmount( toBudget, (row && row.buffered) || 0, amount, @@ -326,10 +326,10 @@ export async function coverOverspending({ to: string; from: string; }): Promise { - let sheetName = monthUtils.sheetForMonth(month); - let 
toBudgeted = await getSheetValue(sheetName, 'budget-' + to); - let leftover = await getSheetValue(sheetName, 'leftover-' + to); - let leftoverFrom = await getSheetValue( + const sheetName = monthUtils.sheetForMonth(month); + const toBudgeted = await getSheetValue(sheetName, 'budget-' + to); + const leftover = await getSheetValue(sheetName, 'leftover-' + to); + const leftoverFrom = await getSheetValue( sheetName, from === 'to-be-budgeted' ? 'to-budget' : 'leftover-' + from, ); @@ -338,7 +338,7 @@ export async function coverOverspending({ return; } - let amountCovered = Math.min(-leftover, leftoverFrom); + const amountCovered = Math.min(-leftover, leftoverFrom); // If we are covering it from the to be budgeted amount, ignore this if (from !== 'to-be-budgeted') { @@ -362,11 +362,11 @@ export async function transferAvailable({ amount: number; category: string; }): Promise { - let sheetName = monthUtils.sheetForMonth(month); - let leftover = await getSheetValue(sheetName, 'to-budget'); + const sheetName = monthUtils.sheetForMonth(month); + const leftover = await getSheetValue(sheetName, 'to-budget'); amount = Math.max(Math.min(amount, leftover), 0); - let budgeted = await getSheetValue(sheetName, 'budget-' + category); + const budgeted = await getSheetValue(sheetName, 'budget-' + category); await setBudget({ category, month, amount: budgeted + amount }); } @@ -403,11 +403,11 @@ export async function setCategoryCarryover({ category: string; flag: boolean; }): Promise { - let table = getBudgetTable(); - let months = getAllMonths(startMonth); + const table = getBudgetTable(); + const months = getAllMonths(startMonth); await batchMessages(async () => { - for (let month of months) { + for (const month of months) { setCarryover(table, category, dbMonth(month).toString(), flag); } }); diff --git a/packages/loot-core/src/server/budget/app.ts b/packages/loot-core/src/server/budget/app.ts index b9faad04a92..a7b5b23fc3e 100644 --- a/packages/loot-core/src/server/budget/app.ts +++ b/packages/loot-core/src/server/budget/app.ts @@ -7,7 +7,7 @@ import * as cleanupActions from './cleanup-template'; import * as goalActions from './goaltemplates'; import { BudgetHandlers } from './types/handlers'; -let app = createApp(); +const app = createApp(); app.method('budget/budget-amount', mutator(undoable(actions.setBudget))); app.method( diff --git a/packages/loot-core/src/server/budget/base.test.ts b/packages/loot-core/src/server/budget/base.test.ts index 69b078605e4..77e70974c7b 100644 --- a/packages/loot-core/src/server/budget/base.test.ts +++ b/packages/loot-core/src/server/budget/base.test.ts @@ -18,7 +18,7 @@ describe('Base budget', () => { name: 'income', is_income: 1, }); - let catId = await db.insertCategory({ + const catId = await db.insertCategory({ name: 'foo', cat_group: 'group1', }); diff --git a/packages/loot-core/src/server/budget/base.ts b/packages/loot-core/src/server/budget/base.ts index a380d1ffb19..6d4ae6f21dd 100644 --- a/packages/loot-core/src/server/budget/base.ts +++ b/packages/loot-core/src/server/budget/base.ts @@ -10,7 +10,7 @@ import * as rollover from './rollover'; import { sumAmounts } from './util'; export function getBudgetType() { - let meta = sheet.get().meta(); + const meta = sheet.get().meta(); return meta.budgetType || 'rollover'; } @@ -40,7 +40,7 @@ function createCategory(cat, sheetName, prevSheetName, start, end) { initialValue: 0, run: () => { // Making this sync is faster! 
- let rows = db.runQuery( + const rows = db.runQuery( `SELECT SUM(amount) as amount FROM v_transactions_internal_alive t LEFT JOIN accounts a ON a.id = t.account WHERE t.date >= ${start} AND t.date <= ${end} @@ -48,8 +48,8 @@ function createCategory(cat, sheetName, prevSheetName, start, end) { [], true, ); - let row = rows[0]; - let amount = row ? row.amount : 0; + const row = rows[0]; + const amount = row ? row.amount : 0; return amount || 0; }, }); @@ -85,7 +85,7 @@ function createCategoryGroup(group, sheetName) { function handleAccountChange(months, oldValue, newValue) { if (!oldValue || oldValue.offbudget !== newValue.offbudget) { - let rows = db.runQuery( + const rows = db.runQuery( ` SELECT DISTINCT(category) as category FROM transactions WHERE acct = ? @@ -95,7 +95,7 @@ function handleAccountChange(months, oldValue, newValue) { ); months.forEach(month => { - let sheetName = monthUtils.sheetForMonth(month); + const sheetName = monthUtils.sheetForMonth(month); rows.forEach(row => { sheet @@ -117,8 +117,8 @@ function handleTransactionChange(transaction, changedFields) { transaction.date && transaction.category ) { - let month = monthUtils.monthFromDate(db.fromDateRepr(transaction.date)); - let sheetName = monthUtils.sheetForMonth(month); + const month = monthUtils.monthFromDate(db.fromDateRepr(transaction.date)); + const sheetName = monthUtils.sheetForMonth(month); sheet .get() @@ -128,7 +128,7 @@ function handleTransactionChange(transaction, changedFields) { function handleCategoryMappingChange(months, oldValue, newValue) { months.forEach(month => { - let sheetName = monthUtils.sheetForMonth(month); + const sheetName = monthUtils.sheetForMonth(month); if (oldValue) { sheet .get() @@ -177,14 +177,14 @@ function handleCategoryChange(months, oldValue, newValue) { ]); } - let budgetType = getBudgetType(); + const budgetType = getBudgetType(); if (oldValue && oldValue.tombstone === 0 && newValue.tombstone === 1) { - let id = newValue.id; - let groupId = newValue.cat_group; + const id = newValue.id; + const groupId = newValue.cat_group; months.forEach(month => { - let sheetName = monthUtils.sheetForMonth(month); + const sheetName = monthUtils.sheetForMonth(month); removeDeps(sheetName, groupId, id); }); } else if ( @@ -196,15 +196,15 @@ function handleCategoryChange(months, oldValue, newValue) { } months.forEach(month => { - let prevMonth = monthUtils.prevMonth(month); - let prevSheetName = monthUtils.sheetForMonth(prevMonth); - let sheetName = monthUtils.sheetForMonth(month); - let { start, end } = monthUtils.bounds(month); + const prevMonth = monthUtils.prevMonth(month); + const prevSheetName = monthUtils.sheetForMonth(prevMonth); + const sheetName = monthUtils.sheetForMonth(month); + const { start, end } = monthUtils.bounds(month); createCategory(newValue, sheetName, prevSheetName, start, end); - let id = newValue.id; - let groupId = newValue.cat_group; + const id = newValue.id; + const groupId = newValue.cat_group; if (getBudgetType() === 'rollover') { sheet @@ -219,10 +219,10 @@ function handleCategoryChange(months, oldValue, newValue) { }); } else if (oldValue && oldValue.cat_group !== newValue.cat_group) { // The category moved so we need to update the dependencies - let id = newValue.id; + const id = newValue.id; months.forEach(month => { - let sheetName = monthUtils.sheetForMonth(month); + const sheetName = monthUtils.sheetForMonth(month); removeDeps(sheetName, oldValue.cat_group, id); addDeps(sheetName, newValue.cat_group, id); }); @@ -230,7 +230,7 @@ function 
handleCategoryChange(months, oldValue, newValue) { } function handleCategoryGroupChange(months, oldValue, newValue) { - let budgetType = getBudgetType(); + const budgetType = getBudgetType(); function addDeps(sheetName, groupId) { sheet @@ -269,26 +269,26 @@ function handleCategoryGroupChange(months, oldValue, newValue) { } if (newValue.tombstone === 1 && oldValue && oldValue.tombstone === 0) { - let id = newValue.id; + const id = newValue.id; months.forEach(month => { - let sheetName = monthUtils.sheetForMonth(month); + const sheetName = monthUtils.sheetForMonth(month); removeDeps(sheetName, id); }); } else if ( newValue.tombstone === 0 && (!oldValue || oldValue.tombstone === 1) ) { - let group = newValue; + const group = newValue; if (!group.is_income || budgetType !== 'rollover') { months.forEach(month => { - let sheetName = monthUtils.sheetForMonth(month); + const sheetName = monthUtils.sheetForMonth(month); // Dirty, dirty hack. These functions should not be async, but this is // OK because we're leveraging the sync nature of queries. Ideally we // wouldn't be querying here. But I think we have to. At least for now // we do - let categories = db.runQuery( + const categories = db.runQuery( 'SELECT * FROM categories WHERE tombstone = 0 AND cat_group = ?', [group.id], true, @@ -302,13 +302,13 @@ function handleCategoryGroupChange(months, oldValue, newValue) { } function handleBudgetMonthChange(budget) { - let sheetName = monthUtils.sheetForMonth(budget.id); + const sheetName = monthUtils.sheetForMonth(budget.id); sheet.get().set(`${sheetName}!buffered`, budget.buffered); } function handleBudgetChange(budget) { if (budget.category) { - let sheetName = monthUtils.sheetForMonth(budget.month.toString()); + const sheetName = monthUtils.sheetForMonth(budget.month.toString()); sheet .get() .set(`${sheetName}!budget-${budget.category}`, budget.amount || 0); @@ -323,22 +323,22 @@ function handleBudgetChange(budget) { } export function triggerBudgetChanges(oldValues, newValues) { - let { createdMonths = new Set() } = sheet.get().meta(); + const { createdMonths = new Set() } = sheet.get().meta(); sheet.startTransaction(); try { newValues.forEach((items, table) => { - let old = oldValues.get(table); + const old = oldValues.get(table); items.forEach(newValue => { - let oldValue = old && old.get(newValue.id); + const oldValue = old && old.get(newValue.id); if (table === 'zero_budget_months') { handleBudgetMonthChange(newValue); } else if (table === 'zero_budgets' || table === 'reflect_budgets') { handleBudgetChange(newValue); } else if (table === 'transactions') { - let changed = new Set( + const changed = new Set( Object.keys(getChangedValues(oldValue || {}, newValue) || {}), ); @@ -363,16 +363,16 @@ export function triggerBudgetChanges(oldValues, newValues) { } export async function doTransfer(categoryIds, transferId) { - let { createdMonths: months } = sheet.get().meta(); + const { createdMonths: months } = sheet.get().meta(); [...months].forEach(month => { - let totalValue = categoryIds + const totalValue = categoryIds .map(id => { return budgetActions.getBudget({ month, category: id }); }) .reduce((total, value) => total + value, 0); - let transferValue = budgetActions.getBudget({ + const transferValue = budgetActions.getBudget({ month, category: transferId, }); @@ -390,10 +390,10 @@ export async function createBudget(months) { const groups = await db.getCategoriesGrouped(); sheet.startTransaction(); - let meta = sheet.get().meta(); + const meta = sheet.get().meta(); meta.createdMonths = 
meta.createdMonths || new Set(); - let budgetType = getBudgetType(); + const budgetType = getBudgetType(); if (budgetType === 'rollover') { rollover.createBudget(meta, categories, months); @@ -401,10 +401,10 @@ export async function createBudget(months) { months.forEach(month => { if (!meta.createdMonths.has(month)) { - let prevMonth = monthUtils.prevMonth(month); - let { start, end } = monthUtils.bounds(month); - let sheetName = monthUtils.sheetForMonth(month); - let prevSheetName = monthUtils.sheetForMonth(prevMonth); + const prevMonth = monthUtils.prevMonth(month); + const { start, end } = monthUtils.bounds(month); + const sheetName = monthUtils.sheetForMonth(month); + const prevSheetName = monthUtils.sheetForMonth(prevMonth); categories.forEach(cat => { createCategory(cat, sheetName, prevSheetName, start, end); @@ -433,24 +433,24 @@ export async function createBudget(months) { } export async function createAllBudgets() { - let earliestTransaction = await db.first( + const earliestTransaction = await db.first( 'SELECT * FROM transactions WHERE isChild=0 AND date IS NOT NULL ORDER BY date ASC LIMIT 1', ); - let earliestDate = + const earliestDate = earliestTransaction && db.fromDateRepr(earliestTransaction.date); - let currentMonth = monthUtils.currentMonth(); + const currentMonth = monthUtils.currentMonth(); // Get the range based off of the earliest transaction and the // current month. If no transactions currently exist the current // month is also used as the starting month - let { start, end, range } = getBudgetRange( + const { start, end, range } = getBudgetRange( earliestDate || currentMonth, currentMonth, ); - let meta = sheet.get().meta(); - let createdMonths = meta.createdMonths || new Set(); - let newMonths = range.filter(m => !createdMonths.has(m)); + const meta = sheet.get().meta(); + const createdMonths = meta.createdMonths || new Set(); + const newMonths = range.filter(m => !createdMonths.has(m)); if (newMonths.length > 0) { await createBudget(range); @@ -460,7 +460,7 @@ export async function createAllBudgets() { } export async function setType(type) { - let meta = sheet.get().meta(); + const meta = sheet.get().meta(); if (type === meta.budgetType) { return; } @@ -469,10 +469,10 @@ export async function setType(type) { meta.createdMonths = new Set(); // Go through and force all the cells to be recomputed - let nodes = sheet.get().getNodes(); + const nodes = sheet.get().getNodes(); db.transaction(() => { - for (let name of nodes.keys()) { - let [sheetName, cellName] = name.split('!'); + for (const name of nodes.keys()) { + const [sheetName, cellName] = name.split('!'); if (sheetName.match(/^budget\d+/)) { sheet.get().deleteCell(sheetName, cellName); } @@ -481,7 +481,7 @@ export async function setType(type) { sheet.get().startCacheBarrier(); sheet.loadUserBudgets(db); - let bounds = await createAllBudgets(); + const bounds = await createAllBudgets(); sheet.get().endCacheBarrier(); return bounds; diff --git a/packages/loot-core/src/server/budget/cleanup-template.ts b/packages/loot-core/src/server/budget/cleanup-template.ts index 941b40da379..2c98e46e94b 100644 --- a/packages/loot-core/src/server/budget/cleanup-template.ts +++ b/packages/loot-core/src/server/budget/cleanup-template.ts @@ -13,21 +13,27 @@ async function processCleanup(month: string): Promise { let num_sources = 0; let num_sinks = 0; let total_weight = 0; - let errors = []; - let sinkCategory = []; + const errors = []; + const sinkCategory = []; - let category_templates = await getCategoryTemplates(); - let 
categories = await db.all( + const category_templates = await getCategoryTemplates(); + const categories = await db.all( 'SELECT * FROM v_categories WHERE tombstone = 0', ); - let sheetName = monthUtils.sheetForMonth(month); + const sheetName = monthUtils.sheetForMonth(month); for (let c = 0; c < categories.length; c++) { - let category = categories[c]; - let template = category_templates[category.id]; + const category = categories[c]; + const template = category_templates[category.id]; if (template) { if (template.filter(t => t.type === 'source').length > 0) { - let balance = await getSheetValue(sheetName, `leftover-${category.id}`); - let budgeted = await getSheetValue(sheetName, `budget-${category.id}`); + const balance = await getSheetValue( + sheetName, + `leftover-${category.id}`, + ); + const budgeted = await getSheetValue( + sheetName, + `budget-${category.id}`, + ); await setBudget({ category: category.id, month, @@ -44,14 +50,14 @@ async function processCleanup(month: string): Promise { } //funds all underfunded categories first unless the overspending rollover is checked - let db_month = parseInt(month.replace('-', '')); + const db_month = parseInt(month.replace('-', '')); for (let c = 0; c < categories.length; c++) { - let category = categories[c]; - let budgetAvailable = await getSheetValue(sheetName, `to-budget`); - let balance = await getSheetValue(sheetName, `leftover-${category.id}`); - let budgeted = await getSheetValue(sheetName, `budget-${category.id}`); - let to_budget = budgeted + Math.abs(balance); - let categoryId = category.id; + const category = categories[c]; + const budgetAvailable = await getSheetValue(sheetName, `to-budget`); + const balance = await getSheetValue(sheetName, `leftover-${category.id}`); + const budgeted = await getSheetValue(sheetName, `budget-${category.id}`); + const to_budget = budgeted + Math.abs(balance); + const categoryId = category.id; let carryover = await db.first( `SELECT carryover FROM zero_budgets WHERE month = ? and category = ?`, [db_month, categoryId], @@ -75,23 +81,27 @@ async function processCleanup(month: string): Promise { } } - let budgetAvailable = await getSheetValue(sheetName, `to-budget`); + const budgetAvailable = await getSheetValue(sheetName, `to-budget`); if (budgetAvailable <= 0) { errors.push('No funds are available to reallocate.'); } for (let c = 0; c < sinkCategory.length; c++) { - let budgeted = await getSheetValue( + const budgeted = await getSheetValue( sheetName, `budget-${sinkCategory[c].cat.id}`, ); - let categoryId = sinkCategory[c].cat.id; - let weight = sinkCategory[c].temp.filter(w => w.type === 'sink')[0].weight; + const categoryId = sinkCategory[c].cat.id; + const weight = sinkCategory[c].temp.filter(w => w.type === 'sink')[0] + .weight; let to_budget = budgeted + Math.round((weight / total_weight) * budgetAvailable); if (c === sinkCategory.length - 1) { - let currentBudgetAvailable = await getSheetValue(sheetName, `to-budget`); + const currentBudgetAvailable = await getSheetValue( + sheetName, + `to-budget`, + ); if (to_budget > currentBudgetAvailable) { to_budget = budgeted + currentBudgetAvailable; } @@ -115,7 +125,7 @@ async function processCleanup(month: string): Promise { return { type: 'message', message: 'All categories were up to date.' }; } } else { - let applied = `Successfully returned funds from ${num_sources} ${ + const applied = `Successfully returned funds from ${num_sources} ${ num_sources === 1 ? 'source' : 'sources' } and funded ${num_sinks} sinking ${num_sinks === 1 ? 
'fund' : 'funds'}.`; if (errors.length) { @@ -135,21 +145,21 @@ async function processCleanup(month: string): Promise { const TEMPLATE_PREFIX = '#cleanup '; async function getCategoryTemplates() { - let templates = {}; + const templates = {}; - let notes = await db.all( + const notes = await db.all( `SELECT * FROM notes WHERE lower(note) like '%${TEMPLATE_PREFIX}%'`, ); for (let n = 0; n < notes.length; n++) { - let lines = notes[n].note.split('\n'); - let template_lines = []; + const lines = notes[n].note.split('\n'); + const template_lines = []; for (let l = 0; l < lines.length; l++) { - let line = lines[l].trim(); + const line = lines[l].trim(); if (!line.toLowerCase().startsWith(TEMPLATE_PREFIX)) continue; - let expression = line.slice(TEMPLATE_PREFIX.length); + const expression = line.slice(TEMPLATE_PREFIX.length); try { - let parsed = parse(expression); + const parsed = parse(expression); template_lines.push(parsed); } catch (e) { template_lines.push({ type: 'error', line, error: e }); diff --git a/packages/loot-core/src/server/budget/goals/goalsBy.ts b/packages/loot-core/src/server/budget/goals/goalsBy.ts index d58110a7220..b3fee6554c0 100644 --- a/packages/loot-core/src/server/budget/goals/goalsBy.ts +++ b/packages/loot-core/src/server/budget/goals/goalsBy.ts @@ -20,7 +20,7 @@ export async function goalsBy( target_month, current_month, ); - let repeat = + const repeat = template.type === 'by' ? template.repeat : (template.repeat || 1) * 12; while (num_months < 0 && repeat) { target_month = monthUtils.addMonths(target_month, repeat); @@ -38,7 +38,8 @@ export async function goalsBy( target = 0; remainder = Math.abs(remainder); } - let increment = num_months >= 0 ? Math.round(target / (num_months + 1)) : 0; + const increment = + num_months >= 0 ? 
Math.round(target / (num_months + 1)) : 0; to_budget += increment; } else { errors.push(`by templates are not supported in Report budgets`); diff --git a/packages/loot-core/src/server/budget/goals/goalsPercentage.ts b/packages/loot-core/src/server/budget/goals/goalsPercentage.ts index 4c8f5b8f8b3..ed0b3e2f16a 100644 --- a/packages/loot-core/src/server/budget/goals/goalsPercentage.ts +++ b/packages/loot-core/src/server/budget/goals/goalsPercentage.ts @@ -10,12 +10,12 @@ export async function goalsPercentage( to_budget, errors, ) { - let percent = template.percent; + const percent = template.percent; let monthlyIncome = 0; if (template.category.toLowerCase() === 'all income') { if (template.previous) { - let sheetName_lastmonth = monthUtils.sheetForMonth( + const sheetName_lastmonth = monthUtils.sheetForMonth( monthUtils.addMonths(month, -1), ); monthlyIncome = await getSheetValue(sheetName_lastmonth, 'total-income'); @@ -25,7 +25,7 @@ export async function goalsPercentage( } else if (template.category.toLowerCase() === 'available funds') { monthlyIncome = available_start; } else { - let income_category = (await db.getCategories()).find( + const income_category = (await db.getCategories()).find( c => c.is_income && c.name.toLowerCase() === template.category.toLowerCase(), ); @@ -34,7 +34,7 @@ export async function goalsPercentage( return { to_budget, errors }; } if (template.previous) { - let sheetName_lastmonth = monthUtils.sheetForMonth( + const sheetName_lastmonth = monthUtils.sheetForMonth( monthUtils.addMonths(month, -1), ); monthlyIncome = await getSheetValue( @@ -49,7 +49,7 @@ export async function goalsPercentage( } } - let increment = Math.max(0, Math.round(monthlyIncome * (percent / 100))); + const increment = Math.max(0, Math.round(monthlyIncome * (percent / 100))); to_budget += increment; return { to_budget, errors }; } diff --git a/packages/loot-core/src/server/budget/goals/goalsRemainder.ts b/packages/loot-core/src/server/budget/goals/goalsRemainder.ts index 9441dec1a89..db06e3cdb36 100644 --- a/packages/loot-core/src/server/budget/goals/goalsRemainder.ts +++ b/packages/loot-core/src/server/budget/goals/goalsRemainder.ts @@ -24,10 +24,10 @@ export function findRemainder(priority_list, categories, category_templates) { // find all remainder templates, place them at highest priority let remainder_found; let remainder_weight_total = 0; - let remainder_priority = priority_list[priority_list.length - 1] + 1; + const remainder_priority = priority_list[priority_list.length - 1] + 1; for (let c = 0; c < categories.length; c++) { - let category = categories[c]; - let templates = category_templates[category.id]; + const category = categories[c]; + const templates = category_templates[category.id]; if (templates) { for (let i = 0; i < templates.length; i++) { if (templates[i].type === 'remainder') { diff --git a/packages/loot-core/src/server/budget/goals/goalsSchedule.ts b/packages/loot-core/src/server/budget/goals/goalsSchedule.ts index 1caf145a543..cbe2bbfd978 100644 --- a/packages/loot-core/src/server/budget/goals/goalsSchedule.ts +++ b/packages/loot-core/src/server/budget/goals/goalsSchedule.ts @@ -16,39 +16,39 @@ export async function goalsSchedule( ) { if (!scheduleFlag) { scheduleFlag = true; - let template = template_lines.filter(t => t.type === 'schedule'); + const template = template_lines.filter(t => t.type === 'schedule'); //in the case of multiple templates per category, schedules may have wrong priority level let t = []; let totalScheduledGoal = 0; for (let ll = 0; ll < 
template.length; ll++) { - let { id: sid, completed: complete } = await db.first( + const { id: sid, completed: complete } = await db.first( 'SELECT * FROM schedules WHERE name = ?', [template[ll].name], ); console.log(complete); - let rule = await getRuleForSchedule(sid); - let conditions = rule.serialize().conditions; - let { date: dateConditions, amount: amountCondition } = + const rule = await getRuleForSchedule(sid); + const conditions = rule.serialize().conditions; + const { date: dateConditions, amount: amountCondition } = extractScheduleConds(conditions); - let target = + const target = amountCondition.op === 'isbetween' ? -Math.round( amountCondition.value.num1 + amountCondition.value.num2, ) / 2 : -amountCondition.value; - let next_date_string = getNextDate( + const next_date_string = getNextDate( dateConditions, monthUtils._parse(current_month), ); - let target_interval = dateConditions.value.interval + const target_interval = dateConditions.value.interval ? dateConditions.value.interval : 1; - let target_frequency = dateConditions.value.frequency; - let isRepeating = + const target_frequency = dateConditions.value.frequency; + const isRepeating = Object(dateConditions.value) === dateConditions.value && 'frequency' in dateConditions.value; - let num_months = monthUtils.differenceInCalendarMonths( + const num_months = monthUtils.differenceInCalendarMonths( next_date_string, current_month, ); @@ -64,7 +64,7 @@ export async function goalsSchedule( if (!complete) { if (isRepeating) { let monthlyTarget = 0; - let next_month = monthUtils.addMonths( + const next_month = monthUtils.addMonths( current_month, t[ll].num_months + 1, ); @@ -74,13 +74,13 @@ export async function goalsSchedule( ); while (next_date < next_month) { monthlyTarget += -target; - let current_date = next_date; + const current_date = next_date; next_date = monthUtils.addDays(next_date, 1); next_date = getNextDate( dateConditions, monthUtils._parse(next_date), ); - let diffDays = monthUtils.differenceInCalendarDays( + const diffDays = monthUtils.differenceInCalendarDays( next_date, current_date, ); diff --git a/packages/loot-core/src/server/budget/goals/goalsSimple.ts b/packages/loot-core/src/server/budget/goals/goalsSimple.ts index 7a297c323eb..d761955db2e 100644 --- a/packages/loot-core/src/server/budget/goals/goalsSimple.ts +++ b/packages/loot-core/src/server/budget/goals/goalsSimple.ts @@ -21,7 +21,7 @@ export async function goalsSimple( } let increment = 0; if (template.monthly != null) { - let monthly = amountToInteger(template.monthly); + const monthly = amountToInteger(template.monthly); increment = monthly; } else { increment = limit; diff --git a/packages/loot-core/src/server/budget/goals/goalsSpend.ts b/packages/loot-core/src/server/budget/goals/goalsSpend.ts index dc49fc18335..252238a9504 100644 --- a/packages/loot-core/src/server/budget/goals/goalsSpend.ts +++ b/packages/loot-core/src/server/budget/goals/goalsSpend.ts @@ -11,8 +11,8 @@ export async function goalsSpend( category, ) { // spend has 'amount' and 'from' and 'month' params - let from_month = `${template.from}-01`; - let to_month = `${template.month}-01`; + const from_month = `${template.from}-01`; + const to_month = `${template.month}-01`; let already_budgeted = last_month_balance; let first_month = true; for ( @@ -20,23 +20,23 @@ export async function goalsSpend( monthUtils.differenceInCalendarMonths(current_month, m) > 0; m = monthUtils.addMonths(m, 1) ) { - let sheetName = monthUtils.sheetForMonth(monthUtils.format(m, 'yyyy-MM')); + const 
sheetName = monthUtils.sheetForMonth(monthUtils.format(m, 'yyyy-MM')); if (first_month) { - let spent = await getSheetValue(sheetName, `sum-amount-${category.id}`); - let balance = await getSheetValue(sheetName, `leftover-${category.id}`); + const spent = await getSheetValue(sheetName, `sum-amount-${category.id}`); + const balance = await getSheetValue(sheetName, `leftover-${category.id}`); already_budgeted = balance - spent; first_month = false; } else { - let budgeted = await getSheetValue(sheetName, `budget-${category.id}`); + const budgeted = await getSheetValue(sheetName, `budget-${category.id}`); already_budgeted += budgeted; } } - let num_months = monthUtils.differenceInCalendarMonths( + const num_months = monthUtils.differenceInCalendarMonths( to_month, monthUtils._parse(current_month), ); - let target = amountToInteger(template.amount); + const target = amountToInteger(template.amount); let increment = 0; if (num_months < 0) { diff --git a/packages/loot-core/src/server/budget/goals/goalsWeek.ts b/packages/loot-core/src/server/budget/goals/goalsWeek.ts index f7d3aed87f2..7e4171cbd2a 100644 --- a/packages/loot-core/src/server/budget/goals/goalsWeek.ts +++ b/packages/loot-core/src/server/budget/goals/goalsWeek.ts @@ -11,8 +11,8 @@ export async function goalsWeek( errors, ) { // week has 'amount', 'starting', 'weeks' and optional 'limit' params - let amount = amountToInteger(template.amount); - let weeks = template.weeks != null ? Math.round(template.weeks) : 1; + const amount = amountToInteger(template.amount); + const weeks = template.weeks != null ? Math.round(template.weeks) : 1; if (template.limit != null) { if (limit > 0) { errors.push(`More than one “up to” limit found.`); @@ -24,7 +24,7 @@ export async function goalsWeek( } } let w = template.starting; - let next_month = monthUtils.addMonths(current_month, 1); + const next_month = monthUtils.addMonths(current_month, 1); while (w < next_month) { if (w >= current_month) { diff --git a/packages/loot-core/src/server/budget/goaltemplates.ts b/packages/loot-core/src/server/budget/goaltemplates.ts index 1c41d10dd43..31ea0406b4b 100644 --- a/packages/loot-core/src/server/budget/goaltemplates.ts +++ b/packages/loot-core/src/server/budget/goaltemplates.ts @@ -16,24 +16,24 @@ import { goalsWeek } from './goals/goalsWeek'; export async function applyTemplate({ month }) { await storeTemplates(); - let category_templates = await getTemplates(null); + const category_templates = await getTemplates(null); await resetCategoryTargets({ month, category: null }); return processTemplate(month, false, category_templates); } export async function overwriteTemplate({ month }) { await storeTemplates(); - let category_templates = await getTemplates(null); + const category_templates = await getTemplates(null); await resetCategoryTargets({ month, category: null }); return processTemplate(month, true, category_templates); } export async function applySingleCategoryTemplate({ month, category }) { - let categories = await db.all(`SELECT * FROM v_categories WHERE id = ?`, [ + const categories = await db.all(`SELECT * FROM v_categories WHERE id = ?`, [ category, ]); await storeTemplates(); - let category_templates = await getTemplates(categories[0]); + const category_templates = await getTemplates(categories[0]); await resetCategoryTargets({ month, category: categories }); return processTemplate(month, true, category_templates); } @@ -104,11 +104,11 @@ async function resetCategoryTargets({ month, category }) { async function storeTemplates() { //stores the 
template definitions to the database - let templates = await getCategoryTemplates(null); - let categories = await getCategories(); + const templates = await getCategoryTemplates(null); + const categories = await getCategories(); for (let c = 0; c < categories.length; c++) { - let template = templates[categories[c].id]; + const template = templates[categories[c].id]; if (template) { await db.update('categories', { id: categories[c].id, @@ -129,12 +129,12 @@ async function getTemplates(category) { 'SELECT * FROM categories WHERE goal_def IS NOT NULL', ); - let templates = []; + const templates = []; for (let ll = 0; ll < goal_def.length; ll++) { templates[goal_def[ll].id] = JSON.parse(goal_def[ll].goal_def); } if (category) { - let singleCategoryTemplate = {}; + const singleCategoryTemplate = {}; if (templates[category.id] !== undefined) { singleCategoryTemplate[category.id] = templates[category.id]; } @@ -152,20 +152,20 @@ async function processTemplate( let num_applied = 0; let errors = []; let originalCategoryBalance = []; - let idealTemplate = []; - let setToZero = []; + const idealTemplate = []; + const setToZero = []; let priority_list = []; - let categories = await getCategories(); + const categories = await getCategories(); //clears templated categories for (let c = 0; c < categories.length; c++) { - let category = categories[c]; - let budgeted = await getSheetValue( + const category = categories[c]; + const budgeted = await getSheetValue( monthUtils.sheetForMonth(month), `budget-${category.id}`, ); - let template = category_templates[category.id]; + const template = category_templates[category.id]; if (template) { for (let l = 0; l < template.length; l++) { //add each priority we need to a list. Will sort later @@ -202,28 +202,28 @@ async function processTemplate( }) .filter((item, index, curr) => curr.indexOf(item) === index); - let { remainder_found, remainder_priority, remainder_weight_total } = + const { remainder_found, remainder_priority, remainder_weight_total } = findRemainder(priority_list, categories, category_templates); if (remainder_found) priority_list.push(remainder_priority); - let sheetName = monthUtils.sheetForMonth(month); - let available_start = await getSheetValue(sheetName, `to-budget`); + const sheetName = monthUtils.sheetForMonth(month); + const available_start = await getSheetValue(sheetName, `to-budget`); let budgetAvailable = isReflectBudget() ? 
await getSheetValue(sheetName, `total-saved`) : await getSheetValue(sheetName, `to-budget`); for (let ii = 0; ii < priority_list.length; ii++) { - let priority = priority_list[ii]; - let templateBudget = []; + const priority = priority_list[ii]; + const templateBudget = []; // setup scaling for remainder let remainder_scale = 1; if (priority === remainder_priority && remainder_found) { - let available_now = await getSheetValue(sheetName, `to-budget`); + const available_now = await getSheetValue(sheetName, `to-budget`); remainder_scale = available_now / remainder_weight_total; } for (let c = 0; c < categories.length; c++) { - let category = categories[c]; + const category = categories[c]; let template_lines = category_templates[category.id]; if (template_lines) { //check that all schedule and by lines have the same priority level @@ -244,7 +244,7 @@ async function processTemplate( t.type === 'schedule' || t.type === 'by', ); - let { lowPriority, errorNotice } = await checkScheduleTemplates( + const { lowPriority, errorNotice } = await checkScheduleTemplates( template_lines, ); priorityCheck = lowPriority; @@ -278,11 +278,11 @@ async function processTemplate( ].join('\n'), ), ); - let prev_budgeted = await getSheetValue( + const prev_budgeted = await getSheetValue( sheetName, `budget-${category.id}`, ); - let { amount: to_budget, errors: applyErrors } = + const { amount: originalToBudget, errors: applyErrors } = await applyCategoryTemplate( category, template_lines, @@ -292,6 +292,8 @@ async function processTemplate( budgetAvailable, prev_budgeted, ); + + let to_budget = originalToBudget; if (to_budget != null) { num_applied++; //only store goals from non remainder templates @@ -376,7 +378,7 @@ async function processTemplate( return { type: 'message', message: 'All categories were up to date.' }; } } else { - let applied = `Successfully applied ${num_applied} templates.`; + const applied = `Successfully applied ${num_applied} templates.`; if (errors.length) { return { sticky: true, @@ -394,7 +396,7 @@ async function processTemplate( const TEMPLATE_PREFIX = '#template'; async function getCategoryTemplates(category) { - let templates = {}; + const templates = {}; let notes = await db.all( `SELECT * FROM notes WHERE lower(note) like '%${TEMPLATE_PREFIX}%'`, @@ -402,14 +404,14 @@ async function getCategoryTemplates(category) { if (category) notes = notes.filter(n => n.id === category.id); for (let n = 0; n < notes.length; n++) { - let lines = notes[n].note.split('\n'); - let template_lines = []; + const lines = notes[n].note.split('\n'); + const template_lines = []; for (let l = 0; l < lines.length; l++) { - let line = lines[l].trim(); + const line = lines[l].trim(); if (!line.toLowerCase().startsWith(TEMPLATE_PREFIX)) continue; - let expression = line.slice(TEMPLATE_PREFIX.length); + const expression = line.slice(TEMPLATE_PREFIX.length); try { - let parsed = parse(expression); + const parsed = parse(expression); template_lines.push(parsed); } catch (e) { template_lines.push({ type: 'error', line, error: e }); @@ -431,7 +433,7 @@ async function applyCategoryTemplate( budgetAvailable, prev_budgeted, ) { - let current_month = `${month}-01`; + const current_month = `${month}-01`; let errors = []; let all_schedule_names = await db.all( 'SELECT name from schedules WHERE name NOT NULL AND tombstone = 0', @@ -450,7 +452,7 @@ async function applyCategoryTemplate( target_month, current_month, ); - let repeat = template.annual + const repeat = template.annual ? 
(template.repeat || 1) * 12 : template.repeat; @@ -514,10 +516,10 @@ async function applyCategoryTemplate( let remainder = 0; for (let l = 0; l < template_lines.length; l++) { - let template = template_lines[l]; + const template = template_lines[l]; switch (template.type) { case 'simple': { - let goalsReturn = await goalsSimple( + const goalsReturn = await goalsSimple( template, limitCheck, errors, @@ -533,7 +535,7 @@ async function applyCategoryTemplate( break; } case 'by': { - let goalsReturn = await goalsBy( + const goalsReturn = await goalsBy( template_lines, current_month, template, @@ -549,7 +551,7 @@ async function applyCategoryTemplate( break; } case 'week': { - let goalsReturn = await goalsWeek( + const goalsReturn = await goalsWeek( template, limit, limitCheck, @@ -566,7 +568,7 @@ async function applyCategoryTemplate( break; } case 'spend': { - let goalsReturn = await goalsSpend( + const goalsReturn = await goalsSpend( template, last_month_balance, current_month, @@ -579,7 +581,7 @@ async function applyCategoryTemplate( break; } case 'percentage': { - let goalsReturn = await goalsPercentage( + const goalsReturn = await goalsPercentage( template, month, available_start, @@ -592,7 +594,7 @@ async function applyCategoryTemplate( break; } case 'schedule': { - let goalsReturn = await goalsSchedule( + const goalsReturn = await goalsSchedule( scheduleFlag, template_lines, current_month, @@ -609,7 +611,7 @@ async function applyCategoryTemplate( break; } case 'remainder': { - let goalsReturn = await goalsRemainder( + const goalsReturn = await goalsRemainder( template, budgetAvailable, remainder_scale, @@ -644,17 +646,17 @@ async function applyCategoryTemplate( } async function checkTemplates(): Promise { - let category_templates = await getCategoryTemplates(null); - let errors = []; + const category_templates = await getCategoryTemplates(null); + const errors = []; - let categories = await db.all( + const categories = await db.all( 'SELECT * FROM v_categories WHERE tombstone = 0', ); // run through each line and see if its an error for (let c = 0; c < categories.length; c++) { - let category = categories[c]; - let template = category_templates[category.id]; + const category = categories[c]; + const template = category_templates[category.id]; if (template) { for (let l = 0; l < template.length; l++) { if (template[l].type === 'error') { diff --git a/packages/loot-core/src/server/budget/report.ts b/packages/loot-core/src/server/budget/report.ts index 66ccf19e8d7..9ddb8c2f266 100644 --- a/packages/loot-core/src/server/budget/report.ts +++ b/packages/loot-core/src/server/budget/report.ts @@ -60,8 +60,8 @@ export async function createCategory(cat, sheetName, prevSheetName) { } export function createSummary(groups, categories, sheetName) { - let incomeGroup = groups.filter(group => group.is_income)[0]; - let expenseCategories = categories.filter(cat => !cat.is_income); + const incomeGroup = groups.filter(group => group.is_income)[0]; + const expenseCategories = categories.filter(cat => !cat.is_income); sheet.get().createDynamic(sheetName, 'total-budgeted', { initialValue: 0, diff --git a/packages/loot-core/src/server/budget/rollover.ts b/packages/loot-core/src/server/budget/rollover.ts index 20b3c60faaa..83a2923280e 100644 --- a/packages/loot-core/src/server/budget/rollover.ts +++ b/packages/loot-core/src/server/budget/rollover.ts @@ -6,13 +6,13 @@ import { resolveName } from '../spreadsheet/util'; import { number, sumAmounts, flatten2, unflatten2 } from './util'; function getBlankSheet(months) 
{ - let blankMonth = monthUtils.prevMonth(months[0]); + const blankMonth = monthUtils.prevMonth(months[0]); return monthUtils.sheetForMonth(blankMonth); } export function createBlankCategory(cat, months) { if (months.length > 0) { - let sheetName = getBlankSheet(months); + const sheetName = getBlankSheet(months); sheet.get().createStatic(sheetName, `carryover-${cat.id}`, false); sheet.get().createStatic(sheetName, `leftover-${cat.id}`, 0); sheet.get().createStatic(sheetName, `leftover-pos-${cat.id}`, 0); @@ -71,8 +71,8 @@ export function createCategory(cat, sheetName, prevSheetName) { } export function createSummary(groups, categories, prevSheetName, sheetName) { - let incomeGroup = groups.filter(group => group.is_income)[0]; - let expenseCategories = categories.filter(cat => !cat.is_income); + const incomeGroup = groups.filter(group => group.is_income)[0]; + const expenseCategories = categories.filter(cat => !cat.is_income); sheet.get().createStatic(sheetName, 'buffered', 0); @@ -170,7 +170,7 @@ export function createBudget(meta, categories, months) { // The spreadsheet is now strict - so we need to fill in some // default values for the month before the first month. Only do this // if it doesn't already exist - let blankSheet = getBlankSheet(months); + const blankSheet = getBlankSheet(months); if (meta.blankSheet !== blankSheet) { sheet.get().clearSheet(meta.blankSheet); createBlankMonth(categories, blankSheet, months); diff --git a/packages/loot-core/src/server/budget/util.ts b/packages/loot-core/src/server/budget/util.ts index d36bacac6ce..54f17cead40 100644 --- a/packages/loot-core/src/server/budget/util.ts +++ b/packages/loot-core/src/server/budget/util.ts @@ -16,7 +16,7 @@ export function flatten2(arr) { } export function unflatten2(arr) { - let res = []; + const res = []; for (let i = 0; i < arr.length; i += 2) { res.push([arr[i], arr[i + 1]]); } diff --git a/packages/loot-core/src/server/cloud-storage.ts b/packages/loot-core/src/server/cloud-storage.ts index a7a7f9c7a42..dfd607302f4 100644 --- a/packages/loot-core/src/server/cloud-storage.ts +++ b/packages/loot-core/src/server/cloud-storage.ts @@ -19,7 +19,7 @@ import { post } from './post'; import * as prefs from './prefs'; import { getServer } from './server-config'; -let UPLOAD_FREQUENCY_IN_DAYS = 7; +const UPLOAD_FREQUENCY_IN_DAYS = 7; export interface RemoteFile { deleted: boolean; @@ -50,9 +50,9 @@ export async function checkKey(): Promise<{ valid: boolean; error?: { reason: string }; }> { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); - let { cloudFileId, encryptKeyId } = prefs.getPrefs(); + const { cloudFileId, encryptKeyId } = prefs.getPrefs(); let res; try { @@ -75,9 +75,9 @@ export async function checkKey(): Promise<{ } export async function resetSyncState(newKeyState) { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); - let { cloudFileId } = prefs.getPrefs(); + const { cloudFileId } = prefs.getPrefs(); try { await post(getServer().SYNC_SERVER + '/reset-user-file', { @@ -116,22 +116,22 @@ export async function resetSyncState(newKeyState) { } export async function exportBuffer() { - let { id, budgetName } = prefs.getPrefs(); + const { id, budgetName } = prefs.getPrefs(); if (!budgetName) { return null; } - let budgetDir = fs.getBudgetDir(id); + const budgetDir = fs.getBudgetDir(id); // create zip - let zipped = new AdmZip(); + const zipped = new AdmZip(); // We run this in a 
mutator even though its not mutating anything // because we are reading the sqlite file from disk. We want to make // sure that we get a valid snapshot of it so we want this to be // serialized with all other mutations. await runMutator(async () => { - let rawDbContent = await fs.readFile( + const rawDbContent = await fs.readFile( fs.join(budgetDir, 'db.sqlite'), 'binary', ); @@ -139,7 +139,7 @@ export async function exportBuffer() { // Do some post-processing of the database. We NEVER upload the cache with // the database; this forces new downloads to always recompute everything // which is not only safer, but reduces the filesize a lot. - let memDb = await sqlite.openDatabase(rawDbContent); + const memDb = await sqlite.openDatabase(rawDbContent); sqlite.execQuery( memDb, ` @@ -148,18 +148,18 @@ export async function exportBuffer() { `, ); - let dbContent = await sqlite.exportDatabase(memDb); + const dbContent = await sqlite.exportDatabase(memDb); sqlite.closeDatabase(memDb); // mark it as a file that needs a new clock so when a new client // downloads it, it'll get set to a unique node - let meta = JSON.parse( + const meta = JSON.parse( await fs.readFile(fs.join(budgetDir, 'metadata.json')), ); meta.resetClock = true; - let metaContent = Buffer.from(JSON.stringify(meta), 'utf8'); + const metaContent = Buffer.from(JSON.stringify(meta), 'utf8'); zipped.addFile('db.sqlite', Buffer.from(dbContent)); zipped.addFile('metadata.json', metaContent); @@ -176,15 +176,15 @@ export async function importBuffer(fileData, buffer) { } catch (err) { throw FileDownloadError('not-zip-file'); } - let dbEntry = entries.find(e => e.entryName.includes('db.sqlite')); - let metaEntry = entries.find(e => e.entryName.includes('metadata.json')); + const dbEntry = entries.find(e => e.entryName.includes('db.sqlite')); + const metaEntry = entries.find(e => e.entryName.includes('metadata.json')); if (!dbEntry || !metaEntry) { throw FileDownloadError('invalid-zip-file'); } - let dbContent = zipped.readFile(dbEntry); - let metaContent = zipped.readFile(metaEntry); + const dbContent = zipped.readFile(dbEntry); + const metaContent = zipped.readFile(metaEntry); let meta; try { @@ -203,12 +203,12 @@ export async function importBuffer(fileData, buffer) { encryptKeyId: fileData.encryptMeta ? 
fileData.encryptMeta.keyId : null, }; - let budgetDir = fs.getBudgetDir(meta.id); + const budgetDir = fs.getBudgetDir(meta.id); if (await fs.exists(budgetDir)) { // Don't remove the directory so that backups are retained - let dbFile = fs.join(budgetDir, 'db.sqlite'); - let metaFile = fs.join(budgetDir, 'metadata.json'); + const dbFile = fs.join(budgetDir, 'db.sqlite'); + const metaFile = fs.join(budgetDir, 'metadata.json'); if (await fs.exists(dbFile)) { await fs.removeFile(dbFile); @@ -227,17 +227,24 @@ export async function importBuffer(fileData, buffer) { } export async function upload() { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); if (!userToken) { throw FileUploadError('unauthorized'); } - let zipContent = await exportBuffer(); + const zipContent = await exportBuffer(); if (zipContent == null) { return; } - let { id, groupId, budgetName, cloudFileId, encryptKeyId } = prefs.getPrefs(); + const { + id, + groupId, + budgetName, + cloudFileId: originalCloudFileId, + encryptKeyId, + } = prefs.getPrefs(); + let cloudFileId = originalCloudFileId; let uploadContent = zipContent; let uploadMeta = null; @@ -308,11 +315,11 @@ export async function upload() { } export async function possiblyUpload() { - let { cloudFileId, groupId, lastUploaded } = prefs.getPrefs(); + const { cloudFileId, groupId, lastUploaded } = prefs.getPrefs(); - let threshold = + const threshold = lastUploaded && monthUtils.addDays(lastUploaded, UPLOAD_FREQUENCY_IN_DAYS); - let currentDay = monthUtils.currentDay(); + const currentDay = monthUtils.currentDay(); // We only want to try to upload every UPLOAD_FREQUENCY_IN_DAYS days if (lastUploaded && currentDay < threshold) { @@ -330,7 +337,7 @@ export async function possiblyUpload() { } export async function removeFile(fileId) { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); await post(getServer().SYNC_SERVER + '/delete-user-file', { token: userToken, @@ -339,7 +346,7 @@ export async function removeFile(fileId) { } export async function listRemoteFiles(): Promise { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); if (!userToken) { return null; } @@ -368,7 +375,7 @@ export async function listRemoteFiles(): Promise { } export async function download(fileId) { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); let buffer; try { @@ -410,7 +417,7 @@ export async function download(fileId) { ); throw FileDownloadError('internal', { fileId }); } - let fileData = res.data; + const fileData = res.data; // The download process checks if the server gave us decrypt // information. It is assumed that this key has already been loaded diff --git a/packages/loot-core/src/server/db/index.ts b/packages/loot-core/src/server/db/index.ts index 9b152daf858..fc0f76707a6 100644 --- a/packages/loot-core/src/server/db/index.ts +++ b/packages/loot-core/src/server/db/index.ts @@ -74,15 +74,15 @@ export function getDatabase() { } export async function loadClock() { - let row = await first('SELECT * FROM messages_clock'); + const row = await first('SELECT * FROM messages_clock'); if (row) { - let clock = deserializeClock(row.clock); + const clock = deserializeClock(row.clock); setClock(clock); } else { // No clock exists yet (first run of the app), so create a default // one. 
- let timestamp = new Timestamp(0, 0, makeClientId()); - let clock = makeClock(timestamp); + const timestamp = new Timestamp(0, 0, makeClientId()); + const clock = makeClock(timestamp); setClock(clock); await runQuery('INSERT INTO messages_clock (id, clock) VALUES (?, ?)', [ @@ -118,12 +118,12 @@ export function execQuery(sql) { // only needed in hot spots when you are running lots of queries. let _queryCache = new LRU({ max: 100 }); export function cache(sql) { - let cached = _queryCache.get(sql); + const cached = _queryCache.get(sql); if (cached) { return cached; } - let prepared = sqlite.prepare(db, sql); + const prepared = sqlite.prepare(db, sql); _queryCache.set(sql, prepared); return prepared; } @@ -176,7 +176,7 @@ export async function select(table, id) { } export async function update(table, params) { - let fields = Object.keys(params).filter(k => k !== 'id'); + const fields = Object.keys(params).filter(k => k !== 'id'); if (params.id == null) { throw new Error('update: id is required'); @@ -209,7 +209,7 @@ export async function insertWithUUID(table, row) { } export async function insert(table, row) { - let fields = Object.keys(row).filter(k => k !== 'id'); + const fields = Object.keys(row).filter(k => k !== 'id'); if (row.id == null) { throw new Error('insert: id is required'); @@ -241,14 +241,14 @@ export async function delete_(table, id) { } export async function selectWithSchema(table, sql, params) { - let rows = await runQuery(sql, params, true); + const rows = await runQuery(sql, params, true); return rows .map(row => convertFromSelect(schema, schemaConfig, table, row)) .filter(Boolean); } export async function selectFirstWithSchema(table, sql, params) { - let rows = await selectWithSchema(table, sql, params); + const rows = await selectWithSchema(table, sql, params); return rows.length > 0 ? rows[0] : null; } @@ -322,7 +322,7 @@ export async function moveCategoryGroup(id, targetId) { ); const { updates, sort_order } = shoveSortOrders(groups, targetId); - for (let info of updates) { + for (const info of updates) { await update('category_groups', info); } await update('category_groups', { id, sort_order }); @@ -374,7 +374,7 @@ export async function insertCategory( categories, categories.length > 0 ? 
categories[0].id : null, ); - for (let info of updates) { + for (const info of updates) { await update('categories', info); } sort_order = order; @@ -409,7 +409,7 @@ export async function moveCategory(id, groupId, targetId?: string) { ); const { updates, sort_order } = shoveSortOrders(categories, targetId); - for (let info of updates) { + for (const info of updates) { await update('categories', info); } await update('categories', { id, sort_order, cat_group: groupId }); @@ -424,7 +424,7 @@ export async function deleteCategory(category, transferId?: string) { 'SELECT * FROM category_mapping WHERE transferId = ?', [category.id], ); - for (let mapping of existingTransfers) { + for (const mapping of existingTransfers) { await update('category_mapping', { id: mapping.id, transferId }); } @@ -450,7 +450,7 @@ export async function insertPayee(payee) { } export async function deletePayee(payee) { - let { transfer_acct } = await first('SELECT * FROM payees WHERE id = ?', [ + const { transfer_acct } = await first('SELECT * FROM payees WHERE id = ?', [ payee.id, ]); if (transfer_acct) { @@ -480,7 +480,7 @@ export function updatePayee(payee) { export async function mergePayees(target, ids) { // Load in payees so we can check some stuff - let payees = groupById(await all('SELECT * FROM payees')); + const payees = groupById(await all('SELECT * FROM payees')); // Filter out any transfer payees if (payees[target].transfer_acct != null) { @@ -491,7 +491,7 @@ export async function mergePayees(target, ids) { await batchMessages(async () => { await Promise.all( ids.map(async id => { - let mappings = await all( + const mappings = await all( 'SELECT id FROM payee_mapping WHERE targetId = ?', [id], ); @@ -533,7 +533,7 @@ export function syncGetOrphanedPayees() { } export async function getOrphanedPayees() { - let rows = await all(` + const rows = await all(` SELECT p.id FROM payees p LEFT JOIN payee_mapping pm ON pm.id = p.id LEFT JOIN v_transactions_internal_alive t ON t.payee = pm.targetId @@ -565,7 +565,7 @@ export async function insertAccount(account) { ); // Don't pass a target in, it will default to appending at the end - let { sort_order } = shoveSortOrders(accounts); + const { sort_order } = shoveSortOrders(accounts); account = accountModel.validate({ ...account, sort_order }); return insertWithUUID('accounts', account); @@ -581,7 +581,7 @@ export function deleteAccount(account) { } export async function moveAccount(id, targetId) { - let account = await first('SELECT * FROM accounts WHERE id = ?', [id]); + const account = await first('SELECT * FROM accounts WHERE id = ?', [id]); let accounts; if (account.closed) { accounts = await all( @@ -596,7 +596,7 @@ export async function moveAccount(id, targetId) { const { updates, sort_order } = shoveSortOrders(accounts, targetId); await batchMessages(async () => { - for (let info of updates) { + for (const info of updates) { update('accounts', info); } update('accounts', { id, sort_order }); @@ -604,7 +604,7 @@ export async function moveAccount(id, targetId) { } export async function getTransaction(id) { - let rows = await selectWithSchema( + const rows = await selectWithSchema( 'transactions', 'SELECT * FROM v_transactions WHERE id = ?', [id], diff --git a/packages/loot-core/src/server/db/mappings.ts b/packages/loot-core/src/server/db/mappings.ts index 32358a38d25..0f8f9653d3f 100644 --- a/packages/loot-core/src/server/db/mappings.ts +++ b/packages/loot-core/src/server/db/mappings.ts @@ -21,11 +21,11 @@ let unlistenSync; export async function loadMappings() { // 
The mappings are separated into tables specific to the type of // data. But you know, we really could keep a global mapping table. - let categories = (await db.all('SELECT * FROM category_mapping')).map(r => [ + const categories = (await db.all('SELECT * FROM category_mapping')).map(r => [ r.id, r.transferId, ]); - let payees = (await db.all('SELECT * FROM payee_mapping')).map(r => [ + const payees = (await db.all('SELECT * FROM payee_mapping')).map(r => [ r.id, r.targetId, ]); @@ -42,7 +42,7 @@ export async function loadMappings() { function onApplySync(oldValues, newValues) { newValues.forEach((items, table) => { if (table.indexOf('mapping') !== -1) { - let field = table === 'category_mapping' ? 'transferId' : 'targetId'; + const field = table === 'category_mapping' ? 'transferId' : 'targetId'; items.forEach(newValue => { allMappings.set(newValue.id, newValue[field]); diff --git a/packages/loot-core/src/server/db/sort.ts b/packages/loot-core/src/server/db/sort.ts index c033c1f17c2..86a41a78b6d 100644 --- a/packages/loot-core/src/server/db/sort.ts +++ b/packages/loot-core/src/server/db/sort.ts @@ -17,7 +17,7 @@ export function shoveSortOrders(items, targetId?: string) { const to = items.findIndex(item => item.id === targetId); const target = items[to]; const before = items[to - 1]; - let updates = []; + const updates = []; // If no target is specified, append at the end if (!targetId || to === -1) { diff --git a/packages/loot-core/src/server/db/util.ts b/packages/loot-core/src/server/db/util.ts index 72e6b704f7d..89ad16ec56e 100644 --- a/packages/loot-core/src/server/db/util.ts +++ b/packages/loot-core/src/server/db/util.ts @@ -5,18 +5,20 @@ export async function incrFetch( makeQuery, params = [], ) { - let pageCount = 500; + const pageCount = 500; let results = []; let fetchedIds = new Set(); for (let i = 0; i < terms.length; i += pageCount) { - let slice = terms.slice(i, i + pageCount).filter(id => !fetchedIds.has(id)); + const slice = terms + .slice(i, i + pageCount) + .filter(id => !fetchedIds.has(id)); if (slice.length > 0) { - let filter = slice.map(id => compare(id)).join(' OR '); - let query = makeQuery('(' + filter + ')'); + const filter = slice.map(id => compare(id)).join(' OR '); + const query = makeQuery('(' + filter + ')'); - let rows = await runQuery(query, params, true); + const rows = await runQuery(query, params, true); fetchedIds = new Set([...fetchedIds, ...slice]); results = results.concat(rows); } @@ -26,8 +28,8 @@ export async function incrFetch( } export function whereIn(ids: string[], field: string) { - let ids2 = [...new Set(ids)]; + const ids2 = [...new Set(ids)]; // eslint-disable-next-line rulesdir/typography - let filter = `${field} IN (` + ids2.map(id => `'${id}'`).join(',') + ')'; + const filter = `${field} IN (` + ids2.map(id => `'${id}'`).join(',') + ')'; return filter; } diff --git a/packages/loot-core/src/server/encryption-internals.ts b/packages/loot-core/src/server/encryption-internals.ts index 02695f52936..b5ccfc62b45 100644 --- a/packages/loot-core/src/server/encryption-internals.ts +++ b/packages/loot-core/src/server/encryption-internals.ts @@ -1,6 +1,6 @@ import crypto from 'crypto'; -let ENCRYPTION_ALGORITHM = 'aes-256-gcm' as const; +const ENCRYPTION_ALGORITHM = 'aes-256-gcm' as const; export async function sha256String(str) { return crypto.createHash('sha256').update(str).digest('base64'); @@ -11,14 +11,18 @@ export function randomBytes(n) { } export function encrypt(masterKey, value) { - let masterKeyBuffer = masterKey.getValue().raw; + const 
masterKeyBuffer = masterKey.getValue().raw; // let iv = createKeyBuffer({ numBytes: 12, secret: masterKeyBuffer }); - let iv = crypto.randomBytes(12); - let cipher = crypto.createCipheriv(ENCRYPTION_ALGORITHM, masterKeyBuffer, iv); + const iv = crypto.randomBytes(12); + const cipher = crypto.createCipheriv( + ENCRYPTION_ALGORITHM, + masterKeyBuffer, + iv, + ); let encrypted = cipher.update(value); encrypted = Buffer.concat([encrypted, cipher.final()]); - let authTag = cipher.getAuthTag(); + const authTag = cipher.getAuthTag(); return { value: encrypted, @@ -32,13 +36,12 @@ export function encrypt(masterKey, value) { } export function decrypt(masterKey, encrypted, meta) { - let masterKeyBuffer = masterKey.getValue().raw; - let { algorithm, iv, authTag } = meta; - iv = Buffer.from(iv, 'base64'); - - authTag = Buffer.from(authTag, 'base64'); + const masterKeyBuffer = masterKey.getValue().raw; + const { algorithm, iv: originalIv, authTag: originalAuthTag } = meta; + const iv = Buffer.from(originalIv, 'base64'); + const authTag = Buffer.from(originalAuthTag, 'base64'); - let decipher = crypto.createDecipheriv(algorithm, masterKeyBuffer, iv); + const decipher = crypto.createDecipheriv(algorithm, masterKeyBuffer, iv); decipher.setAuthTag(authTag); let decrypted = decipher.update(encrypted); @@ -47,7 +50,7 @@ export function decrypt(masterKey, encrypted, meta) { } export function createKey({ secret, salt }) { - let buffer = createKeyBuffer({ secret, salt }); + const buffer = createKeyBuffer({ secret, salt }); return { raw: buffer, base64: buffer.toString('base64'), diff --git a/packages/loot-core/src/server/encryption-internals.web.ts b/packages/loot-core/src/server/encryption-internals.web.ts index 9a4161ef962..729128b88e7 100644 --- a/packages/loot-core/src/server/encryption-internals.web.ts +++ b/packages/loot-core/src/server/encryption-internals.web.ts @@ -1,4 +1,4 @@ -let ENCRYPTION_ALGORITHM = 'aes-256-gcm'; +const ENCRYPTION_ALGORITHM = 'aes-256-gcm'; function browserAlgorithmName(name) { switch (name) { @@ -11,9 +11,9 @@ function browserAlgorithmName(name) { export async function sha256String(str) { // @ts-expect-error TextEncoder might not accept an argument - let inputBuffer = new TextEncoder('utf-8').encode(str).buffer; - let buffer = await crypto.subtle.digest('sha-256', inputBuffer); - let outputStr = Array.from(new Uint8Array(buffer)) + const inputBuffer = new TextEncoder('utf-8').encode(str).buffer; + const buffer = await crypto.subtle.digest('sha-256', inputBuffer); + const outputStr = Array.from(new Uint8Array(buffer)) .map(n => String.fromCharCode(n)) .join(''); return btoa(outputStr); @@ -24,7 +24,7 @@ export function randomBytes(n) { } export async function encrypt(masterKey, value) { - let iv = crypto.getRandomValues(new Uint8Array(12)); + const iv = crypto.getRandomValues(new Uint8Array(12)); let encrypted = await crypto.subtle.encrypt( { @@ -39,7 +39,7 @@ export async function encrypt(masterKey, value) { encrypted = Buffer.from(encrypted); // Strip the auth tag off the end - let authTag = encrypted.slice(-16); + const authTag = encrypted.slice(-16); encrypted = encrypted.slice(0, -16); return { @@ -55,9 +55,9 @@ export async function encrypt(masterKey, value) { } export async function decrypt(masterKey, encrypted, meta) { - let { algorithm, iv, authTag } = meta; + const { algorithm, iv, authTag } = meta; - let decrypted = await crypto.subtle.decrypt( + const decrypted = await crypto.subtle.decrypt( { name: browserAlgorithmName(algorithm), iv: Buffer.from(iv, 'base64'), @@ 
-71,10 +71,10 @@ export async function decrypt(masterKey, encrypted, meta) { } export async function createKey({ secret, salt }) { - let passwordBuffer = Buffer.from(secret); - let saltBuffer = Buffer.from(salt); + const passwordBuffer = Buffer.from(secret); + const saltBuffer = Buffer.from(salt); - let passwordKey = await crypto.subtle.importKey( + const passwordKey = await crypto.subtle.importKey( 'raw', passwordBuffer, { name: 'PBKDF2' }, @@ -82,7 +82,7 @@ export async function createKey({ secret, salt }) { ['deriveBits', 'deriveKey'], ); - let derivedKey = await crypto.subtle.deriveKey( + const derivedKey = await crypto.subtle.deriveKey( { name: 'PBKDF2', hash: 'SHA-512', @@ -95,7 +95,7 @@ export async function createKey({ secret, salt }) { ['encrypt', 'decrypt'], ); - let exported = await crypto.subtle.exportKey('raw', derivedKey); + const exported = await crypto.subtle.exportKey('raw', derivedKey); return { raw: derivedKey, @@ -104,7 +104,7 @@ export async function createKey({ secret, salt }) { } export async function importKey(str) { - let key = await crypto.subtle.importKey( + const key = await crypto.subtle.importKey( 'raw', Buffer.from(str, 'base64'), { name: 'AES-GCM' }, diff --git a/packages/loot-core/src/server/encryption.test.ts b/packages/loot-core/src/server/encryption.test.ts index 3a9b249986e..14d5d265d6e 100644 --- a/packages/loot-core/src/server/encryption.test.ts +++ b/packages/loot-core/src/server/encryption.test.ts @@ -4,16 +4,16 @@ afterEach(() => encryption.unloadAllKeys()); describe('Encryption', () => { test('should encrypt and decrypt', async () => { - let key = await encryption.createKey({ + const key = await encryption.createKey({ id: 'foo', password: 'mypassword', salt: 'salt', }); await encryption.loadKey(key); - let data = await encryption.encrypt('hello', 'foo'); + const data = await encryption.encrypt('hello', 'foo'); - let output = await encryption.decrypt(data.value, data.meta); + const output = await encryption.decrypt(data.value, data.meta); expect(output.toString()).toBe('hello'); }); }); diff --git a/packages/loot-core/src/server/encryption.ts b/packages/loot-core/src/server/encryption.ts index 9ca45ee4f36..57bd5c72a8b 100644 --- a/packages/loot-core/src/server/encryption.ts +++ b/packages/loot-core/src/server/encryption.ts @@ -82,7 +82,7 @@ export function unloadAllKeys() { } export async function createKey({ id, password, salt }) { - let key = new Key({ id }); + const key = new Key({ id }); await key.createFromPassword({ password, salt }); return key; } diff --git a/packages/loot-core/src/server/filters/app.ts b/packages/loot-core/src/server/filters/app.ts index 748946475c9..3e3b5f62b77 100644 --- a/packages/loot-core/src/server/filters/app.ts +++ b/packages/loot-core/src/server/filters/app.ts @@ -23,7 +23,7 @@ const filterModel = { }, toJS(row) { - let { conditions, conditions_op, ...fields } = row; + const { conditions, conditions_op, ...fields } = row; return { ...fields, conditionsOp: conditions_op, @@ -32,7 +32,7 @@ const filterModel = { }, fromJS(filter) { - let { conditionsOp, ...row } = filter; + const { conditionsOp, ...row } = filter; if (conditionsOp) { row.conditions_op = conditionsOp; } @@ -41,7 +41,7 @@ const filterModel = { }; async function filterNameExists(name, filterId, newItem) { - let idForName = await db.first( + const idForName = await db.first( 'SELECT id from transaction_filters WHERE tombstone = 0 AND name = ?', [name], ); @@ -58,7 +58,7 @@ async function filterNameExists(name, filterId, newItem) { //TODO: Possible to 
simplify this? //use filters and maps function conditionExists(item, filters, newItem) { - let { conditions, conditionsOp } = item; + const { conditions, conditionsOp } = item; let condCheck = []; let fCondCheck = false; let fCondFound; @@ -102,8 +102,8 @@ function conditionExists(item, filters, newItem) { } async function createFilter(filter) { - let filterId = uuidv4(); - let item = { + const filterId = uuidv4(); + const item = { id: filterId, conditions: filter.state.conditions, conditionsOp: filter.state.conditionsOp, @@ -119,7 +119,7 @@ async function createFilter(filter) { } if (item.conditions.length > 0) { - let condExists = conditionExists(item, filter.filters, true); + const condExists = conditionExists(item, filter.filters, true); if (condExists) { throw new Error( 'Duplicate filter warning: conditions already exist. Filter name: ' + @@ -137,7 +137,7 @@ async function createFilter(filter) { } async function updateFilter(filter) { - let item = { + const item = { id: filter.state.id, conditions: filter.state.conditions, conditionsOp: filter.state.conditionsOp, @@ -152,7 +152,7 @@ async function updateFilter(filter) { } if (item.conditions.length > 0) { - let condExists = conditionExists(item, filter.filters, false); + const condExists = conditionExists(item, filter.filters, false); if (condExists) { throw new Error( 'Duplicate filter warning: conditions already exist. Filter name: ' + @@ -170,7 +170,7 @@ async function deleteFilter(id) { await db.delete_('transaction_filters', id); } -let app = createApp(); +const app = createApp(); app.method('filter-create', mutator(createFilter)); app.method('filter-update', mutator(updateFilter)); diff --git a/packages/loot-core/src/server/importers/actual.ts b/packages/loot-core/src/server/importers/actual.ts index ac1fe39dba4..69e5d178dd6 100644 --- a/packages/loot-core/src/server/importers/actual.ts +++ b/packages/loot-core/src/server/importers/actual.ts @@ -25,7 +25,7 @@ export default async function importActual(_filepath: string, buffer: Buffer) { // We never want to load cached data from imported files, so // delete the cache - let sqliteDb = await sqlite.openDatabase( + const sqliteDb = await sqlite.openDatabase( fs.join(fs.getBudgetDir(id), 'db.sqlite'), ); sqlite.execQuery( diff --git a/packages/loot-core/src/server/importers/index.ts b/packages/loot-core/src/server/importers/index.ts index 39cdbb86d6f..41d5248ef56 100644 --- a/packages/loot-core/src/server/importers/index.ts +++ b/packages/loot-core/src/server/importers/index.ts @@ -12,7 +12,7 @@ type Importer = { doImport(data: unknown): Promise; }; -let importers: Record, Importer> = { +const importers: Record, Importer> = { ynab4: YNAB4, ynab5: YNAB5, }; @@ -25,7 +25,7 @@ export async function handleBudgetImport( if (type === 'actual') { return importActual(filepath, buffer); } - let importer = importers[type]; + const importer = importers[type]; try { let data; let budgetName: string; diff --git a/packages/loot-core/src/server/importers/ynab4.ts b/packages/loot-core/src/server/importers/ynab4.ts index 9a6084c5107..45dc7d2d6f8 100644 --- a/packages/loot-core/src/server/importers/ynab4.ts +++ b/packages/loot-core/src/server/importers/ynab4.ts @@ -68,7 +68,7 @@ async function importCategories( // This can't be done in parallel because sort order depends // on insertion order - for (let category of subCategories) { + for (const category of subCategories) { if (!category.isTombstone) { const id = await actual.createCategory({ name: category.name, @@ -90,9 +90,9 @@ async function 
importPayees( data: YNAB4.YFull, entityIdMap: Map, ) { - for (let payee of data.payees) { + for (const payee of data.payees) { if (!payee.isTombstone) { - let id = await actual.createPayee({ + const id = await actual.createPayee({ name: payee.name, category: entityIdMap.get(payee.autoFillCategoryId) || null, transfer_acct: entityIdMap.get(payee.targetAccountId) || null, @@ -129,7 +129,7 @@ async function importTransactions( } function isOffBudget(acctId: string) { - let acct = accounts.find(acct => acct.id === acctId); + const acct = accounts.find(acct => acct.id === acctId); if (!acct) { throw new Error('Could not find account for transaction when importing'); } @@ -138,32 +138,32 @@ async function importTransactions( // Go ahead and generate ids for all of the transactions so we can // reliably resolve transfers - for (let transaction of data.transactions) { + for (const transaction of data.transactions) { entityIdMap.set(transaction.entityId, uuidv4()); if (transaction.subTransactions) { - for (let subTransaction of transaction.subTransactions) { + for (const subTransaction of transaction.subTransactions) { entityIdMap.set(subTransaction.entityId, uuidv4()); } } } - let transactionsGrouped = groupBy(data.transactions, 'accountId'); + const transactionsGrouped = groupBy(data.transactions, 'accountId'); await Promise.all( [...transactionsGrouped.keys()].map(async accountId => { - let transactions = transactionsGrouped.get(accountId); + const transactions = transactionsGrouped.get(accountId); - let toImport = transactions + const toImport = transactions .map(transaction => { if (transaction.isTombstone) { return null; } - let id = entityIdMap.get(transaction.entityId); + const id = entityIdMap.get(transaction.entityId); function transferProperties(t: YNAB4.SubTransaction) { - let transferId = entityIdMap.get(t.transferTransactionId) || null; + const transferId = entityIdMap.get(t.transferTransactionId) || null; let payee = null; let imported_payee = null; @@ -185,7 +185,7 @@ async function importTransactions( }; } - let newTransaction = { + const newTransaction = { id, amount: amountToInteger(transaction.amount), category: isOffBudget(entityIdMap.get(accountId)) @@ -254,20 +254,20 @@ async function importBudgets( data: YNAB4.YFull, entityIdMap: Map, ) { - let budgets = sortByKey(data.monthlyBudgets, 'month'); + const budgets = sortByKey(data.monthlyBudgets, 'month'); await actual.batchBudgetUpdates(async () => { - for (let budget of budgets) { - let filled = fillInBudgets( + for (const budget of budgets) { + const filled = fillInBudgets( data, budget.monthlySubCategoryBudgets.filter(b => !b.isTombstone), ); await Promise.all( filled.map(async catBudget => { - let amount = amountToInteger(catBudget.budgeted); - let catId = entityIdMap.get(catBudget.categoryId); - let month = monthUtils.monthFromDate(budget.month); + const amount = amountToInteger(catBudget.budgeted); + const catId = entityIdMap.get(catBudget.categoryId); + const month = monthUtils.monthFromDate(budget.month); if (!catId) { return; } @@ -357,7 +357,7 @@ export function getBudgetName(filepath, _data) { // Most budgets are named like "Budget~51938D82.ynab4" but sometimes // they are only "Budget.ynab4". We only want to grab the name // before the ~ if it exists. 
- let m = unixFilepath.match(/([^/~]+)[^/]*$/); + const m = unixFilepath.match(/([^/~]+)[^/]*$/); if (!m) { return null; } @@ -365,7 +365,7 @@ export function getBudgetName(filepath, _data) { } function getFile(entries: AdmZip.IZipEntry[], path: string) { - let files = entries.filter(e => e.entryName === path); + const files = entries.filter(e => e.entryName === path); if (files.length === 0) { throw new Error('Could not find file: ' + path); } @@ -382,23 +382,23 @@ function join(...paths: string[]): string { } export function parseFile(buffer: Buffer): YNAB4.YFull { - let zipped = new AdmZip(buffer); - let entries = zipped.getEntries(); + const zipped = new AdmZip(buffer); + const entries = zipped.getEntries(); let root = ''; - let dirMatch = entries[0].entryName.match(/([^/]*\.ynab4)/); + const dirMatch = entries[0].entryName.match(/([^/]*\.ynab4)/); if (dirMatch) { root = dirMatch[1] + '/'; } - let metaStr = zipped.readFile(getFile(entries, root + 'Budget.ymeta')); - let meta = JSON.parse(metaStr.toString('utf8')); - let budgetPath = join(root, meta.relativeDataFolderName); + const metaStr = zipped.readFile(getFile(entries, root + 'Budget.ymeta')); + const meta = JSON.parse(metaStr.toString('utf8')); + const budgetPath = join(root, meta.relativeDataFolderName); - let deviceFiles = entries.filter(e => + const deviceFiles = entries.filter(e => e.entryName.startsWith(join(budgetPath, 'devices')), ); - let deviceGUID = findLatestDevice(zipped, deviceFiles); + const deviceGUID = findLatestDevice(zipped, deviceFiles); const yfullPath = join(budgetPath, deviceGUID, 'Budget.yfull'); let contents; diff --git a/packages/loot-core/src/server/importers/ynab5.ts b/packages/loot-core/src/server/importers/ynab5.ts index 6268ffba4aa..0653a1f95a1 100644 --- a/packages/loot-core/src/server/importers/ynab5.ts +++ b/packages/loot-core/src/server/importers/ynab5.ts @@ -20,7 +20,7 @@ function importAccounts(data: YNAB5.Budget, entityIdMap: Map) { return Promise.all( data.accounts.map(async account => { if (!account.deleted) { - let id = await actual.createAccount({ + const id = await actual.createAccount({ name: account.name, offbudget: account.on_budget ? false : true, closed: account.closed, @@ -65,7 +65,7 @@ async function importCategories( // Can't be done in parallel to have // correct sort order. - for (let group of data.category_groups) { + for (const group of data.category_groups) { if (!group.deleted) { let groupId; // Ignores internal category and credit cards @@ -80,18 +80,18 @@ async function importCategories( entityIdMap.set(group.id, groupId); } - let cats = data.categories.filter( + const cats = data.categories.filter( cat => cat.category_group_id === group.id, ); - for (let cat of cats.reverse()) { + for (const cat of cats.reverse()) { if (!cat.deleted) { // Handles special categories. 
Starting balance is a payee // in YNAB so it's handled in importTransactions switch (checkSpecialCat(cat)) { case 'income': { // doesn't create new category, only assigns id - let id = incomeCatId; + const id = incomeCatId; entityIdMap.set(cat.id, id); break; } @@ -99,7 +99,7 @@ async function importCategories( case 'internal': // uncategorized is ignored too, handled by actual break; default: { - let id = await actual.createCategory({ + const id = await actual.createCategory({ name: cat.name, group_id: groupId, }); @@ -117,7 +117,7 @@ function importPayees(data: YNAB5.Budget, entityIdMap: Map) { return Promise.all( data.payees.map(async payee => { if (!payee.deleted) { - let id = await actual.createPayee({ + const id = await actual.createPayee({ name: payee.name, }); entityIdMap.set(payee.id, id); @@ -140,8 +140,11 @@ async function importTransactions( payee => payee.name === 'Starting Balance', ).id; - let transactionsGrouped = groupBy(data.transactions, 'account_id'); - let subtransactionsGrouped = groupBy(data.subtransactions, 'transaction_id'); + const transactionsGrouped = groupBy(data.transactions, 'account_id'); + const subtransactionsGrouped = groupBy( + data.subtransactions, + 'transaction_id', + ); const payeesByTransferAcct = payees .filter((payee: YNAB5.Payee) => payee?.transfer_acct) @@ -152,27 +155,27 @@ async function importTransactions( // Go ahead and generate ids for all of the transactions so we can // reliably resolve transfers - for (let transaction of data.transactions) { + for (const transaction of data.transactions) { entityIdMap.set(transaction.id, uuidv4()); } - for (let transaction of data.subtransactions) { + for (const transaction of data.subtransactions) { entityIdMap.set(transaction.id, uuidv4()); } await Promise.all( [...transactionsGrouped.keys()].map(async accountId => { - let transactions = transactionsGrouped.get(accountId); + const transactions = transactionsGrouped.get(accountId); - let toImport = transactions + const toImport = transactions .map(transaction => { if (transaction.deleted) { return null; } - let subtransactions = subtransactionsGrouped.get(transaction.id); + const subtransactions = subtransactionsGrouped.get(transaction.id); // Add transaction - let newTransaction = { + const newTransaction = { id: entityIdMap.get(transaction.id), account: entityIdMap.get(transaction.account_id), date: transaction.date, @@ -253,7 +256,7 @@ async function importBudgets( // Also, there could be a way to set rollover using // Deferred Income Subcat and Immediate Income Subcat - let budgets = sortByKey(data.months, 'month'); + const budgets = sortByKey(data.months, 'month'); const internalCatIdYnab = data.category_groups.find( group => group.name === 'Internal Master Category', @@ -263,13 +266,13 @@ async function importBudgets( ).id; await actual.batchBudgetUpdates(async () => { - for (let budget of budgets) { - let month = monthUtils.monthFromDate(budget.month); + for (const budget of budgets) { + const month = monthUtils.monthFromDate(budget.month); await Promise.all( budget.categories.map(async catBudget => { - let catId = entityIdMap.get(catBudget.id); - let amount = Math.round(catBudget.budgeted / 10); + const catId = entityIdMap.get(catBudget.id); + const amount = Math.round(catBudget.budgeted / 10); if ( !catId || diff --git a/packages/loot-core/src/server/main.test.ts b/packages/loot-core/src/server/main.test.ts index 8854ae5747c..a6d3b38a36a 100644 --- a/packages/loot-core/src/server/main.test.ts +++ b/packages/loot-core/src/server/main.test.ts @@ 
-34,8 +34,8 @@ afterEach(async () => { }); async function createTestBudget(name) { - let templatePath = fs.join(__dirname, '/../mocks/files', name); - let budgetPath = fs.join(__dirname, '/../mocks/files/budgets/test-budget'); + const templatePath = fs.join(__dirname, '/../mocks/files', name); + const budgetPath = fs.join(__dirname, '/../mocks/files/budgets/test-budget'); fs._setDocumentDir(fs.join(budgetPath, '..')); await fs.mkdir(budgetPath); @@ -52,7 +52,10 @@ async function createTestBudget(name) { describe('Budgets', () => { afterEach(async () => { fs._setDocumentDir(null); - let budgetPath = fs.join(__dirname, '/../mocks/files/budgets/test-budget'); + const budgetPath = fs.join( + __dirname, + '/../mocks/files/budgets/test-budget', + ); if (await fs.exists(budgetPath)) { await fs.removeDirRecursively(budgetPath); @@ -64,9 +67,9 @@ describe('Budgets', () => { // Grab the clock to compare later await db.openDatabase('test-budget'); - let row = await db.first('SELECT * FROM messages_clock'); + const row = await db.first('SELECT * FROM messages_clock'); - let { error } = await runHandler(handlers['load-budget'], { + const { error } = await runHandler(handlers['load-budget'], { id: 'test-budget', }); expect(error).toBe(undefined); @@ -86,7 +89,7 @@ describe('Budgets', () => { const spy = jest.spyOn(console, 'warn').mockImplementation(); - let { error } = await runHandler(handlers['load-budget'], { + const { error } = await runHandler(handlers['load-budget'], { id: 'test-budget', }); // There should be an error and the budget should be unloaded @@ -120,7 +123,7 @@ describe('Accounts', () => { // Get accounts from the server. This isn't the normal API call, // we know that the mock server just returns hardcoded accounts - let { accounts } = await post('/plaid/accounts', {}); + const { accounts } = await post('/plaid/accounts', {}); // Create the accounts for the bank (bank is generally ignored in tests) await runHandler(handlers['accounts-connect'], { @@ -135,7 +138,7 @@ describe('Accounts', () => { // Go through each account and make sure the starting balance was // created correctly const res = await db.all('SELECT * FROM accounts'); - for (let account of res) { + for (const account of res) { const sum = await db.first( 'SELECT sum(amount) as sum FROM transactions WHERE acct = ? 
AND starting_balance_flag = 0', [account.id], @@ -186,7 +189,7 @@ describe('Accounts', () => { payee: 'transfer-two', date: '2017-01-01', }); - let differ = expectSnapshotWithDiffer( + const differ = expectSnapshotWithDiffer( await db.all('SELECT * FROM transactions'), ); @@ -251,7 +254,7 @@ describe('Budget', () => { function captureChangedCells(func) { return new Promise(async resolve => { let changed = []; - let remove = spreadsheet.addEventListener('change', ({ names }) => { + const remove = spreadsheet.addEventListener('change', ({ names }) => { changed = changed.concat(names); }); await func(); @@ -303,7 +306,7 @@ describe('Budget', () => { }); await db.runQuery("INSERT INTO accounts (id, name) VALUES ('boa', 'boa')"); - let trans = { + const trans = { id: 'boa-transaction', date: '2017-02-06', amount: 5000, @@ -355,7 +358,7 @@ describe('Categories', () => { test('transfers properly when deleted', async () => { await sheet.loadSpreadsheet(db); - let transId = await runMutator(async () => { + const transId = await runMutator(async () => { await db.insertCategoryGroup({ id: 'group1', name: 'group1' }); await db.insertCategoryGroup({ id: 'group1b', name: 'group1b' }); await db.insertCategoryGroup({ @@ -389,7 +392,7 @@ describe('Categories', () => { await budget.createAllBudgets(); // Set a budget value for the category `foo` of 1000 - let sheetName = monthUtils.sheetForMonth('2018-01'); + const sheetName = monthUtils.sheetForMonth('2018-01'); await budgetActions.setBudget({ category: 'foo', month: '2018-01', @@ -417,7 +420,7 @@ describe('Categories', () => { // Transfering an income category to an expense just doesn't make // sense. Make sure this doesn't do anything. - let { error } = await runHandler(handlers['category-delete'], { + const { error } = await runHandler(handlers['category-delete'], { id: 'income1', transferId: 'bar', }); diff --git a/packages/loot-core/src/server/main.ts b/packages/loot-core/src/server/main.ts index 1a4baaee73c..851f2a601d7 100644 --- a/packages/loot-core/src/server/main.ts +++ b/packages/loot-core/src/server/main.ts @@ -68,14 +68,14 @@ import { withUndo, clearUndo, undo, redo } from './undo'; import { updateVersion } from './update'; import { uniqueFileName, idFromFileName } from './util/budget-name'; -let DEMO_BUDGET_ID = '_demo-budget'; -let TEST_BUDGET_ID = '_test-budget'; +const DEMO_BUDGET_ID = '_demo-budget'; +const TEST_BUDGET_ID = '_test-budget'; // util function onSheetChange({ names }) { const nodes = names.map(name => { - let node = sheet.get()._getNode(name); + const node = sheet.get()._getNode(name); return { name: node.name, value: node.value }; }); connection.send('cells-changed', nodes); @@ -102,7 +102,7 @@ handlers['transactions-batch-update'] = mutator(async function ({ learnCategories, }) { return withUndo(async () => { - let result = await batchUpdateTransactions({ + const result = await batchUpdateTransactions({ added, updated, deleted, @@ -154,7 +154,7 @@ handlers['get-categories'] = async function () { }; handlers['get-earliest-transaction'] = async function () { - let { data } = await aqlQuery( + const { data } = await aqlQuery( q('transactions') .options({ splits: 'none' }) .orderBy({ date: 'asc' }) @@ -169,11 +169,11 @@ handlers['get-budget-bounds'] = async function () { }; handlers['rollover-budget-month'] = async function ({ month }) { - let groups = await db.getCategoriesGrouped(); - let sheetName = monthUtils.sheetForMonth(month); + const groups = await db.getCategoriesGrouped(); + const sheetName = 
monthUtils.sheetForMonth(month); function value(name) { - let v = sheet.getCellValue(sheetName, name); + const v = sheet.getCellValue(sheetName, name); return { value: v === '' ? 0 : v, name: resolveName(sheetName, name) }; } @@ -190,11 +190,11 @@ handlers['rollover-budget-month'] = async function ({ month }) { value('total-leftover'), ]; - for (let group of groups) { + for (const group of groups) { if (group.is_income) { values.push(value('total-income')); - for (let cat of group.categories) { + for (const cat of group.categories) { values.push(value(`sum-amount-${cat.id}`)); } } else { @@ -204,7 +204,7 @@ handlers['rollover-budget-month'] = async function ({ month }) { value(`group-leftover-${group.id}`), ]); - for (let cat of group.categories) { + for (const cat of group.categories) { values = values.concat([ value(`budget-${cat.id}`), value(`sum-amount-${cat.id}`), @@ -219,11 +219,11 @@ handlers['rollover-budget-month'] = async function ({ month }) { }; handlers['report-budget-month'] = async function ({ month }) { - let groups = await db.getCategoriesGrouped(); - let sheetName = monthUtils.sheetForMonth(month); + const groups = await db.getCategoriesGrouped(); + const sheetName = monthUtils.sheetForMonth(month); function value(name) { - let v = sheet.getCellValue(sheetName, name); + const v = sheet.getCellValue(sheetName, name); return { value: v === '' ? 0 : v, name: resolveName(sheetName, name) }; } @@ -237,14 +237,14 @@ handlers['report-budget-month'] = async function ({ month }) { value('total-leftover'), ]; - for (let group of groups) { + for (const group of groups) { values = values.concat([ value(`group-budget-${group.id}`), value(`group-sum-amount-${group.id}`), value(`group-leftover-${group.id}`), ]); - for (let cat of group.categories) { + for (const cat of group.categories) { values = values.concat([ value(`budget-${cat.id}`), value(`sum-amount-${cat.id}`), @@ -319,7 +319,7 @@ handlers['category-delete'] = mutator(async function ({ id, transferId }) { return withUndo(async () => { let result = {}; await batchMessages(async () => { - let row = await db.first( + const row = await db.first( 'SELECT is_income FROM categories WHERE id = ?', [id], ); @@ -328,7 +328,7 @@ handlers['category-delete'] = mutator(async function ({ id, transferId }) { return; } - let transfer = + const transfer = transferId && (await db.first('SELECT is_income FROM categories WHERE id = ?', [ transferId, @@ -446,7 +446,7 @@ handlers['payees-get-orphaned'] = async function () { }; handlers['payees-get-rule-counts'] = async function () { - let payeeCounts = {}; + const payeeCounts = {}; rules.iterateIds(rules.getRules(), 'payee', (rule, id) => { if (payeeCounts[id] == null) { @@ -490,7 +490,7 @@ handlers['payees-batch-change'] = mutator(async function ({ }); handlers['payees-check-orphaned'] = async function ({ ids }) { - let orphaned = new Set(await db.getOrphanedPayees()); + const orphaned = new Set(await db.getOrphanedPayees()); return ids.filter(id => orphaned.has(id)); }; @@ -504,10 +504,10 @@ handlers['make-filters-from-conditions'] = async function ({ conditions }) { handlers['getCell'] = async function ({ sheetName, name }) { // Fields is no longer used - hardcode - let fields = ['name', 'value']; - let node = sheet.get()._getNode(resolveName(sheetName, name)); + const fields = ['name', 'value']; + const node = sheet.get()._getNode(resolveName(sheetName, name)); if (fields) { - let res = {}; + const res = {}; fields.forEach(field => { if (field === 'run') { res[field] = node._run ? 
node._run.toString() : null; @@ -526,9 +526,9 @@ handlers['getCells'] = async function ({ names }) { }; handlers['getCellNamesInSheet'] = async function ({ sheetName }) { - let names = []; - for (let name of sheet.get().getNodes().keys()) { - let { sheet: nodeSheet, name: nodeName } = unresolveName(name); + const names = []; + for (const name of sheet.get().getNodes().keys()) { + const { sheet: nodeSheet, name: nodeName } = unresolveName(name); if (nodeSheet === sheetName) { names.push(nodeName); } @@ -537,7 +537,7 @@ handlers['getCellNamesInSheet'] = async function ({ sheetName }) { }; handlers['debugCell'] = async function ({ sheetName, name }) { - let node = sheet.get().getNode(resolveName(sheetName, name)); + const node = sheet.get().getNode(resolveName(sheetName, name)); return { ...node, _run: node._run && node._run.toString(), @@ -608,16 +608,20 @@ handlers['accounts-link'] = async function ({ accountId, upgradingId, }) { - let bankId = await link.handoffPublicToken(institution, publicToken); + const bankId = await link.handoffPublicToken(institution, publicToken); - let [[, userId], [, userKey]] = await asyncStorage.multiGet([ + const [[, userId], [, userKey]] = await asyncStorage.multiGet([ 'user-id', 'user-key', ]); // Get all the available accounts and find the selected one - let accounts = await bankSync.getGoCardlessAccounts(userId, userKey, bankId); - let account = accounts.find(acct => acct.account_id === accountId); + const accounts = await bankSync.getGoCardlessAccounts( + userId, + userKey, + bankId, + ); + const account = accounts.find(acct => acct.account_id === accountId); await db.update('accounts', { id: upgradingId, @@ -652,7 +656,7 @@ handlers['gocardless-accounts-link'] = async function ({ upgradingId, }) { let id; - let bank = await link.findOrCreateBank(account.institution, requisitionId); + const bank = await link.findOrCreateBank(account.institution, requisitionId); if (upgradingId) { const accRow = await db.first('SELECT * FROM accounts WHERE id = ?', [ @@ -702,8 +706,8 @@ handlers['accounts-connect'] = async function ({ accountIds, offbudgetIds, }) { - let bankId = await link.handoffPublicToken(institution, publicToken); - let ids = await link.addAccounts(bankId, accountIds, offbudgetIds); + const bankId = await link.handoffPublicToken(institution, publicToken); + const ids = await link.addAccounts(bankId, accountIds, offbudgetIds); return ids; }; @@ -713,8 +717,12 @@ handlers['gocardless-accounts-connect'] = async function ({ accountIds, offbudgetIds, }) { - let bankId = await link.handoffPublicToken(institution, publicToken); - let ids = await link.addGoCardlessAccounts(bankId, accountIds, offbudgetIds); + const bankId = await link.handoffPublicToken(institution, publicToken); + const ids = await link.addGoCardlessAccounts( + bankId, + accountIds, + offbudgetIds, + ); return ids; }; @@ -737,7 +745,7 @@ handlers['account-create'] = mutator(async function ({ }); if (balance != null && balance !== 0) { - let payee = await getStartingBalancePayee(); + const payee = await getStartingBalancePayee(); await db.insertTransaction({ account: id, @@ -766,7 +774,7 @@ handlers['account-close'] = mutator(async function ({ await handlers['account-unlink']({ id }); return withUndo(async () => { - let account = await db.first( + const account = await db.first( 'SELECT * FROM accounts WHERE id = ? 
AND tombstone = 0', [id], ); @@ -785,13 +793,13 @@ handlers['account-close'] = mutator(async function ({ if (numTransactions === 0) { await db.deleteAccount({ id }); } else if (forced) { - let rows = await db.runQuery( + const rows = await db.runQuery( 'SELECT id, transfer_id FROM v_transactions WHERE account = ?', [id], true, ); - let { id: payeeId } = await db.first( + const { id: payeeId } = await db.first( 'SELECT id FROM payees WHERE transfer_acct = ?', [id], ); @@ -828,7 +836,7 @@ handlers['account-close'] = mutator(async function ({ // If there is a balance we need to transfer it to the specified // account (and possibly categorize it) if (balance !== 0) { - let { id: payeeId } = await db.first( + const { id: payeeId } = await db.first( 'SELECT id FROM payees WHERE transfer_acct = ?', [transferAccountId], ); @@ -862,12 +870,12 @@ handlers['account-move'] = mutator(async function ({ id, targetId }) { let stopPolling = false; handlers['poll-web-token'] = async function ({ token }) { - let [[, userId], [, key]] = await asyncStorage.multiGet([ + const [[, userId], [, key]] = await asyncStorage.multiGet([ 'user-id', 'user-key', ]); - let startTime = Date.now(); + const startTime = Date.now(); stopPolling = false; async function getData(cb) { @@ -880,7 +888,7 @@ handlers['poll-web-token'] = async function ({ token }) { return; } - let data = await post( + const data = await post( getServer().PLAID_SERVER + '/get-web-token-contents', { userId, @@ -917,7 +925,7 @@ handlers['poll-web-token-stop'] = async function () { }; handlers['accounts-sync'] = async function ({ id }) { - let [[, userId], [, userKey]] = await asyncStorage.multiGet([ + const [[, userId], [, userKey]] = await asyncStorage.multiGet([ 'user-id', 'user-key', ]); @@ -933,7 +941,7 @@ handlers['accounts-sync'] = async function ({ id }) { accounts = accounts.filter(acct => acct.id === id); } - let errors = []; + const errors = []; let newTransactions = []; let matchedTransactions = []; let updatedAccounts = []; @@ -949,7 +957,7 @@ handlers['accounts-sync'] = async function ({ id }) { acct.account_id, acct.bankId, ); - let { added, updated } = res; + const { added, updated } = res; newTransactions = newTransactions.concat(added); matchedTransactions = matchedTransactions.concat(updated); @@ -998,7 +1006,7 @@ handlers['accounts-sync'] = async function ({ id }) { }; handlers['secret-set'] = async function ({ name, value }) { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); if (!userToken) { return { error: 'unauthorized' }; @@ -1022,7 +1030,7 @@ handlers['secret-set'] = async function ({ name, value }) { }; handlers['secret-check'] = async function (name) { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); if (!userToken) { return { error: 'unauthorized' }; @@ -1042,10 +1050,10 @@ handlers['gocardless-poll-web-token'] = async function ({ upgradingAccountId, requisitionId, }) { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); if (!userToken) return { error: 'unknown' }; - let startTime = Date.now(); + const startTime = Date.now(); stopPolling = false; async function getData(cb) { @@ -1058,7 +1066,7 @@ handlers['gocardless-poll-web-token'] = async function ({ return; } - let data = await post( + const data = await post( getServer().GOCARDLESS_SERVER + '/get-accounts', { upgradingAccountId, @@ -1133,7 +1141,7 @@ 
handlers['gocardless-create-web-token'] = async function ({ institutionId, accessValidForDays, }) { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); if (!userToken) { return { error: 'unauthorized' }; @@ -1158,7 +1166,7 @@ handlers['gocardless-create-web-token'] = async function ({ }; handlers['gocardless-accounts-sync'] = async function ({ id }) { - let [[, userId], [, userKey]] = await asyncStorage.multiGet([ + const [[, userId], [, userKey]] = await asyncStorage.multiGet([ 'user-id', 'user-key', ]); @@ -1174,7 +1182,7 @@ handlers['gocardless-accounts-sync'] = async function ({ id }) { accounts = accounts.filter(acct => acct.id === id); } - let errors = []; + const errors = []; let newTransactions = []; let matchedTransactions = []; let updatedAccounts = []; @@ -1190,7 +1198,7 @@ handlers['gocardless-accounts-sync'] = async function ({ id }) { acct.account_id, acct.bankId, ); - let { added, updated } = res; + const { added, updated } = res; newTransactions = newTransactions.concat(added); matchedTransactions = matchedTransactions.concat(updated); @@ -1260,7 +1268,7 @@ handlers['transactions-import'] = mutator(function ({ }); handlers['account-unlink'] = mutator(async function ({ id }) { - let { bank: bankId } = await db.first( + const { bank: bankId } = await db.first( 'SELECT bank FROM accounts WHERE id = ?', [id], ); @@ -1278,20 +1286,20 @@ handlers['account-unlink'] = mutator(async function ({ id }) { balance_limit: null, }); - let { count } = await db.first( + const { count } = await db.first( 'SELECT COUNT(*) as count FROM accounts WHERE bank = ?', [bankId], ); // No more accounts are associated with this bank. We can remove // it from GoCardless. - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); if (!userToken) { return 'ok'; } if (count === 0) { - let { bank_id: requisitionId } = await db.first( + const { bank_id: requisitionId } = await db.first( 'SELECT bank_id FROM banks WHERE id = ?', [bankId], ); @@ -1314,12 +1322,12 @@ handlers['account-unlink'] = mutator(async function ({ id }) { }); handlers['make-plaid-public-token'] = async function ({ bankId }) { - let [[, userId], [, userKey]] = await asyncStorage.multiGet([ + const [[, userId], [, userKey]] = await asyncStorage.multiGet([ 'user-id', 'user-key', ]); - let data = await post(getServer().PLAID_SERVER + '/make-public-token', { + const data = await post(getServer().PLAID_SERVER + '/make-public-token', { userId, key: userKey, item_id: '' + bankId, @@ -1355,7 +1363,7 @@ handlers['save-global-prefs'] = async function (prefs) { }; handlers['load-global-prefs'] = async function () { - let [ + const [ [, floatingSidebar], [, maxMonths], [, autoUpdate], @@ -1381,11 +1389,11 @@ handlers['load-global-prefs'] = async function () { }; handlers['save-prefs'] = async function (prefsToSet) { - let { cloudFileId } = prefs.getPrefs(); + const { cloudFileId } = prefs.getPrefs(); // Need to sync the budget name on the server as well if (prefsToSet.budgetName && cloudFileId) { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); await post(getServer().SYNC_SERVER + '/update-user-filename', { token: userToken, @@ -1419,15 +1427,15 @@ handlers['key-make'] = async function ({ password }) { throw new Error('user-set-key must be called with file loaded'); } - let salt = encryption.randomBytes(32).toString('base64'); - let id = uuidv4(); 
- let key = await encryption.createKey({ id, password, salt }); + const salt = encryption.randomBytes(32).toString('base64'); + const id = uuidv4(); + const key = await encryption.createKey({ id, password, salt }); // Load the key await encryption.loadKey(key); // Make some test data to use if the key is valid or not - let testContent = await makeTestMessage(key.getId()); + const testContent = await makeTestMessage(key.getId()); // Changing your key necessitates a sync reset as well. This will // clear all existing encrypted data from the server so you won't @@ -1445,7 +1453,7 @@ handlers['key-make'] = async function ({ password }) { // This can be called both while a file is already loaded or not. This // will see if a key is valid and if so save it off. handlers['key-test'] = async function ({ fileId, password }) { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); if (fileId == null) { fileId = prefs.getPrefs().cloudFileId; @@ -1462,15 +1470,16 @@ handlers['key-test'] = async function ({ fileId, password }) { return { error: { reason: 'network' } }; } - let { id, salt, test } = res; + const { id, salt, test: originalTest } = res; + let test = originalTest; if (test == null) { return { error: { reason: 'old-key-style' } }; } test = JSON.parse(test); - let key = await encryption.createKey({ id, password, salt }); + const key = await encryption.createKey({ id, password, salt }); encryption.loadKey(key); try { @@ -1484,7 +1493,7 @@ handlers['key-test'] = async function ({ fileId, password }) { } // Persist key in async storage - let keys = JSON.parse((await asyncStorage.getItem(`encrypt-keys`)) || '{}'); + const keys = JSON.parse((await asyncStorage.getItem(`encrypt-keys`)) || '{}'); keys[fileId] = key.serialize(); await asyncStorage.setItem('encrypt-keys', JSON.stringify(keys)); @@ -1556,7 +1565,7 @@ handlers['subscribe-get-user'] = async function () { return { offline: false }; } - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); if (!userToken) { return null; @@ -1585,7 +1594,7 @@ handlers['subscribe-get-user'] = async function () { }; handlers['subscribe-change-password'] = async function ({ password }) { - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); if (!userToken) { return { error: 'not-logged-in' }; } @@ -1603,7 +1612,7 @@ handlers['subscribe-change-password'] = async function ({ password }) { }; handlers['subscribe-sign-in'] = async function ({ password }) { - let res = await post(getServer().SIGNUP_SERVER + '/login', { + const res = await post(getServer().SIGNUP_SERVER + '/login', { password, }); @@ -1656,9 +1665,12 @@ handlers['set-server-url'] = async function ({ url, validate = true }) { if (validate) { // Validate the server is running - let { error } = await runHandler(handlers['subscribe-needs-bootstrap'], { - url, - }); + const { error } = await runHandler( + handlers['subscribe-needs-bootstrap'], + { + url, + }, + ); if (error) { return { error }; } @@ -1758,7 +1770,7 @@ handlers['download-budget'] = async function ({ fileId }) { if (e.type === 'FileDownloadError') { if (e.reason === 'file-exists' && e.meta.id) { await prefs.loadPrefs(e.meta.id); - let name = prefs.getPrefs().budgetName; + const name = prefs.getPrefs().budgetName; prefs.unloadPrefs(); e.meta = { ...e.meta, name }; @@ -1771,7 +1783,7 @@ handlers['download-budget'] = async function ({ fileId }) 
{ } } - let id = result.id; + const id = result.id; await handlers['load-budget']({ id }); result = await handlers['sync-budget'](); await handlers['close-budget'](); @@ -1784,13 +1796,13 @@ handlers['download-budget'] = async function ({ fileId }) { // open and sync, but don’t close handlers['sync-budget'] = async function () { setSyncingMode('enabled'); - let result = await initialFullSync(); + const result = await initialFullSync(); return result; }; handlers['load-budget'] = async function ({ id }) { - let currentPrefs = prefs.getPrefs(); + const currentPrefs = prefs.getPrefs(); if (currentPrefs) { if (currentPrefs.id === id) { @@ -1802,7 +1814,7 @@ handlers['load-budget'] = async function ({ id }) { } } - let res = await loadBudget(id); + const res = await loadBudget(id); return res; }; @@ -1854,7 +1866,7 @@ handlers['delete-budget'] = async function ({ id, cloudFileId }) { // If a local file exists, you can delete it by passing its local id if (id) { - let budgetDir = fs.getBudgetDir(id); + const budgetDir = fs.getBudgetDir(id); await fs.removeDirRecursively(budgetDir); } @@ -1885,14 +1897,14 @@ handlers['create-budget'] = async function ({ if (!budgetName) { // Unfortunately we need to load all of the existing files first // so we can detect conflicting names. - let files = await handlers['get-budgets'](); + const files = await handlers['get-budgets'](); budgetName = await uniqueFileName(files); } id = await idFromFileName(budgetName); } - let budgetDir = fs.getBudgetDir(id); + const budgetDir = fs.getBudgetDir(id); await fs.mkdir(budgetDir); // Create the initial database @@ -1905,7 +1917,7 @@ handlers['create-budget'] = async function ({ ); // Load it in - let { error } = await loadBudget(id); + const { error } = await loadBudget(id); if (error) { console.log('Error creating budget: ' + error); return { error }; @@ -1933,8 +1945,8 @@ handlers['import-budget'] = async function ({ filepath, type }) { throw new Error(`File not found at the provided path: ${filepath}`); } - let buffer = Buffer.from(await fs.readFile(filepath, 'binary')); - let results = await handleBudgetImport(type, filepath, buffer); + const buffer = Buffer.from(await fs.readFile(filepath, 'binary')); + const results = await handleBudgetImport(type, filepath, buffer); return results || {}; } catch (err) { err.message = 'Error importing budget: ' + err.message; @@ -1986,7 +1998,7 @@ async function loadBudget(id) { // Older versions didn't tag the file with the current user, so do // so now if (!prefs.getPrefs().userId) { - let userId = await asyncStorage.getItem('user-token'); + const userId = await asyncStorage.getItem('user-token'); prefs.savePrefs({ userId }); } @@ -2176,10 +2188,10 @@ export async function initApp(isDev, socketName) { await Promise.all([asyncStorage.init(), fs.init()]); await setupDocumentsDir(); - let keysStr = await asyncStorage.getItem('encrypt-keys'); + const keysStr = await asyncStorage.getItem('encrypt-keys'); if (keysStr) { try { - let keys = JSON.parse(keysStr); + const keys = JSON.parse(keysStr); // Load all the keys await Promise.all( @@ -2217,7 +2229,7 @@ export async function initApp(isDev, socketName) { connection.init(socketName, app.handlers); if (!isDev && !Platform.isMobile && !Platform.isWeb) { - let autoUpdate = await asyncStorage.getItem('auto-update'); + const autoUpdate = await asyncStorage.getItem('auto-update'); process.send({ type: 'shouldAutoUpdate', flag: autoUpdate == null || autoUpdate === 'true', @@ -2277,7 +2289,7 @@ export const lib = { getDataDir: fs.getDataDir, 
sendMessage: (msg, args) => connection.send(msg, args), send: async (name, args) => { - let res = await runHandler(app.handlers[name], args); + const res = await runHandler(app.handlers[name], args); return res; }, on: (name, func) => app.events.on(name, func), diff --git a/packages/loot-core/src/server/migrate/migrations.test.ts b/packages/loot-core/src/server/migrate/migrations.test.ts index c763190b6d5..e3a166daf2d 100644 --- a/packages/loot-core/src/server/migrate/migrations.test.ts +++ b/packages/loot-core/src/server/migrate/migrations.test.ts @@ -12,8 +12,8 @@ beforeEach(global.emptyDatabase(true)); describe('Migrations', () => { test('gets the latest migrations', async () => { - let applied = await getAppliedMigrations(db.getDatabase()); - let available = await getMigrationList( + const applied = await getAppliedMigrations(db.getDatabase()); + const available = await getMigrationList( __dirname + '/../../mocks/migrations', ); @@ -28,9 +28,9 @@ describe('Migrations', () => { async () => { await migrate(db.getDatabase()); - let migrations = await getAppliedMigrations(db.getDatabase()); + const migrations = await getAppliedMigrations(db.getDatabase()); let last = 0; - for (let migration of migrations) { + for (const migration of migrations) { if (migration <= last) { throw new Error('Found older migration out of order'); } diff --git a/packages/loot-core/src/server/migrate/migrations.ts b/packages/loot-core/src/server/migrate/migrations.ts index 07690ec37d5..db8dbebf1bb 100644 --- a/packages/loot-core/src/server/migrate/migrations.ts +++ b/packages/loot-core/src/server/migrate/migrations.ts @@ -10,7 +10,7 @@ import * as sqlite from '../../platform/server/sqlite'; let MIGRATIONS_DIR = fs.migrationsPath; -let javascriptMigrations = { +const javascriptMigrations = { 1632571489012: m1632571489012, }; @@ -18,7 +18,7 @@ export async function withMigrationsDir( dir: string, func: () => Promise, ): Promise { - let oldDir = MIGRATIONS_DIR; + const oldDir = MIGRATIONS_DIR; MIGRATIONS_DIR = dir; await func(); MIGRATIONS_DIR = oldDir; @@ -33,7 +33,7 @@ function getMigrationId(name: string): number { } export function getUpMigration(id, names) { - for (let m of names) { + for (const m of names) { if (getMigrationId(m) === id) { return m; } @@ -41,9 +41,9 @@ export function getUpMigration(id, names) { } async function patchBadMigrations(db: Database) { - let badFiltersMigration = 1685375406832; - let newFiltersMigration = 1688749527273; - let appliedIds = await getAppliedMigrations(db); + const badFiltersMigration = 1685375406832; + const newFiltersMigration = 1688749527273; + const appliedIds = await getAppliedMigrations(db); if (appliedIds.includes(badFiltersMigration)) { await sqlite.runQuery(db, 'DELETE FROM __migrations__ WHERE id = ?', [ badFiltersMigration, @@ -101,7 +101,7 @@ async function applyJavaScript(db, id) { throw new Error('Could not find JS migration code to run for ' + id); } - let run = javascriptMigrations[id]; + const run = javascriptMigrations[id]; return run(dbInterface, () => uuidv4()); } @@ -150,14 +150,14 @@ function checkDatabaseValidity( export async function migrate(db: Database): Promise { await patchBadMigrations(db); - let appliedIds = await getAppliedMigrations(db); - let available = await getMigrationList(MIGRATIONS_DIR); + const appliedIds = await getAppliedMigrations(db); + const available = await getMigrationList(MIGRATIONS_DIR); checkDatabaseValidity(appliedIds, available); const pending = getPending(appliedIds, available); - for (let 
migration of pending) { + for (const migration of pending) { await applyMigration(db, migration, MIGRATIONS_DIR); } diff --git a/packages/loot-core/src/server/models.ts b/packages/loot-core/src/server/models.ts index 1b2281553a3..fd419f20f69 100644 --- a/packages/loot-core/src/server/models.ts +++ b/packages/loot-core/src/server/models.ts @@ -57,7 +57,7 @@ export const categoryModel = { update, ); - let { sort_order, ...rest } = category; + const { sort_order, ...rest } = category; return { ...rest, hidden: rest.hidden ? 1 : 0 }; }, }; @@ -71,7 +71,7 @@ export const categoryGroupModel = { update, ); - let { sort_order, ...rest } = categoryGroup; + const { sort_order, ...rest } = categoryGroup; return { ...rest, hidden: rest.hidden ? 1 : 0 }; }, }; @@ -113,7 +113,7 @@ export const transactionModel = { ); } - let trans = { ...row }; + const trans = { ...row }; trans.error = row.error ? JSON.parse(row.error) : null; trans.isParent = row.isParent === 1 ? true : false; trans.isChild = row.isChild === 1 ? true : false; @@ -126,7 +126,7 @@ export const transactionModel = { }, fromJS(trans) { - let row = { ...trans }; + const row = { ...trans }; if ('error' in row) { row.error = trans.error ? JSON.stringify(trans.error) : null; } diff --git a/packages/loot-core/src/server/mutators.ts b/packages/loot-core/src/server/mutators.ts index c23900b6629..9d85c086157 100644 --- a/packages/loot-core/src/server/mutators.ts +++ b/packages/loot-core/src/server/mutators.ts @@ -1,10 +1,10 @@ import { captureException, captureBreadcrumb } from '../platform/exceptions'; import { sequential } from '../shared/async'; -let runningMethods = new Set(); +const runningMethods = new Set(); let currentContext = null; -let mutatingMethods = new WeakMap(); +const mutatingMethods = new WeakMap(); let globalMutationsEnabled = false; let _latestHandlerNames = []; @@ -62,7 +62,7 @@ export async function runHandler( await flushRunningMethods(); } - let promise = handler(args); + const promise = handler(args); runningMethods.add(promise); promise.then(() => { runningMethods.delete(promise); @@ -108,7 +108,7 @@ export function withMutatorContext( return func(); } - let prevContext = currentContext; + const prevContext = currentContext; currentContext = { ...currentContext, ...context }; return func().finally(() => { currentContext = prevContext; diff --git a/packages/loot-core/src/server/notes/app.ts b/packages/loot-core/src/server/notes/app.ts index bc76a5be192..e410ab7f377 100644 --- a/packages/loot-core/src/server/notes/app.ts +++ b/packages/loot-core/src/server/notes/app.ts @@ -3,7 +3,7 @@ import * as db from '../db'; import { NotesHandlers } from './types/handlers'; -let app = createApp(); +const app = createApp(); app.method('notes-save', async ({ id, note }) => { await db.update('notes', { id, note }); diff --git a/packages/loot-core/src/server/post.ts b/packages/loot-core/src/server/post.ts index 8919650630f..dee9cde6438 100644 --- a/packages/loot-core/src/server/post.ts +++ b/packages/loot-core/src/server/post.ts @@ -9,9 +9,9 @@ function throwIfNot200(res, text) { throw new PostError(res.status === 500 ? 
'internal' : text); } - let contentType = res.headers.get('Content-Type'); + const contentType = res.headers.get('Content-Type'); if (contentType.toLowerCase().indexOf('application/json') !== -1) { - let json = JSON.parse(text); + const json = JSON.parse(text); throw new PostError(json.reason); } throw new PostError(text); diff --git a/packages/loot-core/src/server/prefs.ts b/packages/loot-core/src/server/prefs.ts index 0cb45bcdcd9..57e92cbfd1c 100644 --- a/packages/loot-core/src/server/prefs.ts +++ b/packages/loot-core/src/server/prefs.ts @@ -28,7 +28,7 @@ export async function loadPrefs(id?: string): Promise { } // delete released feature flags - let releasedFeatures = ['syncAccount']; + const releasedFeatures = ['syncAccount']; for (const feature of releasedFeatures) { delete prefs[`flags.${feature}`]; } @@ -75,7 +75,7 @@ export async function savePrefs( } if (process.env.NODE_ENV !== 'test') { - let prefsPath = fs.join(fs.getBudgetDir(prefs.id), 'metadata.json'); + const prefsPath = fs.join(fs.getBudgetDir(prefs.id), 'metadata.json'); await fs.writeFile(prefsPath, JSON.stringify(prefs)); } } diff --git a/packages/loot-core/src/server/rules/app.ts b/packages/loot-core/src/server/rules/app.ts index 47c0b73fb34..a1de1d821d5 100644 --- a/packages/loot-core/src/server/rules/app.ts +++ b/packages/loot-core/src/server/rules/app.ts @@ -32,7 +32,7 @@ function validateRule(rule: Partial) { return result.length ? result : null; } - let conditionErrors = runValidation( + const conditionErrors = runValidation( rule.conditions, cond => new Condition( @@ -44,7 +44,7 @@ function validateRule(rule: Partial) { ), ); - let actionErrors = runValidation(rule.actions, action => + const actionErrors = runValidation(rule.actions, action => action.op === 'link-schedule' ? new Action(action.op, null, action.value, null, ruleFieldTypes) : new Action( @@ -67,22 +67,22 @@ function validateRule(rule: Partial) { } // Expose functions to the client -let app = createApp(); +const app = createApp(); app.method('rule-validate', async function (rule) { - let error = validateRule(rule); + const error = validateRule(rule); return { error }; }); app.method( 'rule-add', mutator(async function (rule) { - let error = validateRule(rule); + const error = validateRule(rule); if (error) { return { error }; } - let id = await rules.insertRule(rule); + const id = await rules.insertRule(rule); return { id }; }), ); @@ -90,7 +90,7 @@ app.method( app.method( 'rule-update', mutator(async function (rule) { - let error = validateRule(rule); + const error = validateRule(rule); if (error) { return { error }; } @@ -113,8 +113,8 @@ app.method( let someDeletionsFailed = false; await batchMessages(async () => { - for (let id of ids) { - let res = await rules.deleteRule({ id }); + for (const id of ids) { + const res = await rules.deleteRule({ id }); if (res === false) { someDeletionsFailed = true; } @@ -146,7 +146,7 @@ app.method('rules-get', async function () { }); app.method('rule-get', async function ({ id }) { - let rule = rules.getRules().find(rule => rule.id === id); + const rule = rules.getRules().find(rule => rule.id === id); return rule ? 
rule.serialize() : null; }); diff --git a/packages/loot-core/src/server/schedules/app.test.ts b/packages/loot-core/src/server/schedules/app.test.ts index 7d34ce0d605..cee8b965711 100644 --- a/packages/loot-core/src/server/schedules/app.test.ts +++ b/packages/loot-core/src/server/schedules/app.test.ts @@ -23,12 +23,12 @@ beforeEach(async () => { describe('schedule app', () => { describe('utility', () => { it('conditions are updated when they exist', () => { - let conds = [ + const conds = [ { op: 'is', field: 'payee', value: 'FOO' }, { op: 'is', field: 'date', value: '2020-01-01' }, ]; - let updated = updateConditions(conds, [ + const updated = updateConditions(conds, [ { op: 'is', field: 'payee', @@ -41,12 +41,12 @@ describe('schedule app', () => { }); it('conditions are added if they don’t exist', () => { - let conds = [ + const conds = [ { op: 'contains', field: 'payee', value: 'FOO' }, { op: 'contains', field: 'notes', value: 'dflksjdflskdjf' }, ]; - let updated = updateConditions(conds, [ + const updated = updateConditions(conds, [ { op: 'is', field: 'payee', @@ -81,7 +81,7 @@ describe('schedule app', () => { describe('methods', () => { it('createSchedule creates a schedule', async () => { - let id = await createSchedule({ + const id = await createSchedule({ conditions: [ { op: 'is', @@ -98,7 +98,7 @@ describe('schedule app', () => { ], }); - let { + const { data: [row], } = await aqlQuery(q('schedules').filter({ id }).select('*')); @@ -114,7 +114,7 @@ describe('schedule app', () => { }); it('updateSchedule updates a schedule', async () => { - let id = await createSchedule({ + const id = await createSchedule({ conditions: [ { op: 'is', field: 'payee', value: 'foo' }, { @@ -175,7 +175,7 @@ describe('schedule app', () => { }); it('deleteSchedule deletes a schedule', async () => { - let id = await createSchedule({ + const id = await createSchedule({ conditions: [ { op: 'is', @@ -192,16 +192,16 @@ describe('schedule app', () => { ], }); - let { data: schedules } = await aqlQuery(q('schedules').select('*')); + const { data: schedules } = await aqlQuery(q('schedules').select('*')); expect(schedules.length).toBe(1); await deleteSchedule({ id }); - let { data: schedules2 } = await aqlQuery(q('schedules').select('*')); + const { data: schedules2 } = await aqlQuery(q('schedules').select('*')); expect(schedules2.length).toBe(0); }); it('setNextDate sets `next_date`', async () => { - let id = await createSchedule({ + const id = await createSchedule({ conditions: [ { op: 'is', @@ -218,7 +218,7 @@ describe('schedule app', () => { ], }); - let { data: ruleId } = await aqlQuery( + const { data: ruleId } = await aqlQuery( q('schedules').filter({ id }).calculate('rule'), ); diff --git a/packages/loot-core/src/server/schedules/app.ts b/packages/loot-core/src/server/schedules/app.ts index 7d3fcfbc71f..2d8ee9a3664 100644 --- a/packages/loot-core/src/server/schedules/app.ts +++ b/packages/loot-core/src/server/schedules/app.ts @@ -37,7 +37,7 @@ import { SchedulesHandlers } from './types/handlers'; // Utilities function zip(arr1, arr2) { - let result = []; + const result = []; for (let i = 0; i < arr1.length; i++) { result.push([arr1[i], arr2[i]]); } @@ -45,20 +45,20 @@ function zip(arr1, arr2) { } export function updateConditions(conditions, newConditions) { - let scheduleConds = extractScheduleConds(conditions); - let newScheduleConds = extractScheduleConds(newConditions); + const scheduleConds = extractScheduleConds(conditions); + const newScheduleConds = extractScheduleConds(newConditions); - let 
replacements = zip( + const replacements = zip( Object.values(scheduleConds), Object.values(newScheduleConds), ); - let updated = conditions.map(cond => { - let r = replacements.find(r => cond === r[0]); + const updated = conditions.map(cond => { + const r = replacements.find(r => cond === r[0]); return r && r[1] ? r[1] : cond; }); - let added = replacements + const added = replacements .filter(x => x[0] == null && x[1] != null) .map(x => x[1]); @@ -68,19 +68,19 @@ export function updateConditions(conditions, newConditions) { export function getNextDate(dateCond, start = new Date(currentDay())) { start = d.startOfDay(start); - let cond = new Condition( + const cond = new Condition( dateCond.op, 'date', dateCond.value, null, new Map(Object.entries({ date: 'date' })), ); - let value = cond.getValue(); + const value = cond.getValue(); if (value.type === 'date') { return value.date; } else if (value.type === 'recur') { - let dates = value.schedule.occurrences({ start, take: 1 }).toArray(); + const dates = value.schedule.occurrences({ start, take: 1 }).toArray(); if (dates.length > 0) { let date = dates[0].date; @@ -101,14 +101,14 @@ export async function getRuleForSchedule(id) { throw new Error('Schedule not attached to a rule'); } - let { data: ruleId } = await aqlQuery( + const { data: ruleId } = await aqlQuery( q('schedules').filter({ id }).calculate('rule'), ); return getRules().find(rule => rule.id === ruleId); } async function fixRuleForSchedule(id) { - let { data: ruleId } = await aqlQuery( + const { data: ruleId } = await aqlQuery( q('schedules').filter({ id }).calculate('rule'), ); @@ -118,7 +118,7 @@ async function fixRuleForSchedule(id) { await db.delete_('rules', ruleId); } - let newId = await insertRule({ + const newId = await insertRule({ stage: null, conditionsOp: 'and', conditions: [ @@ -145,22 +145,22 @@ export async function setNextDate({ reset?: boolean; }) { if (conditions == null) { - let rule = await getRuleForSchedule(id); + const rule = await getRuleForSchedule(id); if (rule == null) { throw new Error('No rule found for schedule'); } conditions = rule.serialize().conditions; } - let { date: dateCond } = extractScheduleConds(conditions); + const { date: dateCond } = extractScheduleConds(conditions); - let { data: nextDate } = await aqlQuery( + const { data: nextDate } = await aqlQuery( q('schedules').filter({ id }).calculate('next_date'), ); // Only do this if a date condition exists if (dateCond) { - let newNextDate = getNextDate( + const newNextDate = getNextDate( dateCond, start ? 
start(nextDate) : new Date(), ); @@ -168,7 +168,7 @@ export async function setNextDate({ if (newNextDate !== nextDate) { // Our `update` functon requires the id of the item and we don't // have it, so we need to query it - let nd = await db.first( + const nd = await db.first( 'SELECT id, base_next_date_ts FROM schedules_next_date WHERE schedule_id = ?', [id], ); @@ -194,7 +194,7 @@ export async function setNextDate({ // Methods async function checkIfScheduleExists(name, scheduleId) { - let idForName = await db.first( + const idForName = await db.first( 'SELECT id from schedules WHERE tombstone = 0 AND name = ?', [name], ); @@ -212,9 +212,9 @@ export async function createSchedule({ schedule = null, conditions = [], } = {}) { - let scheduleId = schedule?.id || uuidv4(); + const scheduleId = schedule?.id || uuidv4(); - let { date: dateCond } = extractScheduleConds(conditions); + const { date: dateCond } = extractScheduleConds(conditions); if (dateCond == null) { throw new Error('A date condition is required to create a schedule'); } @@ -222,8 +222,8 @@ export async function createSchedule({ throw new Error('Date is required'); } - let nextDate = getNextDate(dateCond); - let nextDateRepr = nextDate ? toDateRepr(nextDate) : null; + const nextDate = getNextDate(dateCond); + const nextDateRepr = nextDate ? toDateRepr(nextDate) : null; if (schedule) { if (schedule.name) { if (await checkIfScheduleExists(schedule.name, scheduleId)) { @@ -235,14 +235,14 @@ export async function createSchedule({ } // Create the rule here based on the info - let ruleId = await insertRule({ + const ruleId = await insertRule({ stage: null, conditionsOp: 'and', conditions, actions: [{ op: 'link-schedule', value: scheduleId }], }); - let now = Date.now(); + const now = Date.now(); await db.insertWithUUID('schedules_next_date', { schedule_id: scheduleId, local_next_date: nextDateRepr, @@ -279,7 +279,7 @@ export async function updateSchedule({ // This must be outside the `batchMessages` call because we change // and then read data if (conditions) { - let { date: dateCond } = extractScheduleConds(conditions); + const { date: dateCond } = extractScheduleConds(conditions); if (dateCond && dateCond.value == null) { throw new Error('Date is required'); } @@ -299,13 +299,13 @@ export async function updateSchedule({ await batchMessages(async () => { if (conditions) { - let oldConditions = rule.serialize().conditions; - let newConditions = updateConditions(oldConditions, conditions); + const oldConditions = rule.serialize().conditions; + const newConditions = updateConditions(oldConditions, conditions); await updateRule({ id: rule.id, conditions: newConditions }); // Annoyingly, sometimes it has `type` and sometimes it doesn't - let stripType = ({ type, ...fields }) => fields; + const stripType = ({ type, ...fields }) => fields; // Update `next_date` if the user forced it, or if the account // or date changed. 
We check account because we don't update @@ -337,7 +337,7 @@ export async function updateSchedule({ } export async function deleteSchedule({ id }) { - let { data: ruleId } = await aqlQuery( + const { data: ruleId } = await aqlQuery( q('schedules').filter({ id }).calculate('rule'), ); @@ -360,10 +360,10 @@ function discoverSchedules() { } async function getUpcomingDates({ config, count }) { - let rules = recurConfigToRSchedule(config); + const rules = recurConfigToRSchedule(config); try { - let schedule = new RSchedule({ rrules: rules }); + const schedule = new RSchedule({ rrules: rules }); return schedule .occurrences({ start: d.startOfDay(new Date()), take: count }) @@ -383,19 +383,19 @@ async function getUpcomingDates({ config, count }) { // Services function onRuleUpdate(rule) { - let { actions, conditions } = + const { actions, conditions } = rule instanceof Rule ? rule.serialize() : ruleModel.toJS(rule); if (actions && actions.find(a => a.op === 'link-schedule')) { - let scheduleId = actions.find(a => a.op === 'link-schedule').value; + const scheduleId = actions.find(a => a.op === 'link-schedule').value; if (scheduleId) { - let conds = extractScheduleConds(conditions); + const conds = extractScheduleConds(conditions); - let payeeIdx = conditions.findIndex(c => c === conds.payee); - let accountIdx = conditions.findIndex(c => c === conds.account); - let amountIdx = conditions.findIndex(c => c === conds.amount); - let dateIdx = conditions.findIndex(c => c === conds.date); + const payeeIdx = conditions.findIndex(c => c === conds.payee); + const accountIdx = conditions.findIndex(c => c === conds.account); + const amountIdx = conditions.findIndex(c => c === conds.amount); + const dateIdx = conditions.findIndex(c => c === conds.date); db.runQuery( 'INSERT OR REPLACE INTO schedules_json_paths (schedule_id, payee, account, amount, date) VALUES (?, ?, ?, ?, ?)', @@ -436,13 +436,13 @@ function onApplySync(oldValues, newValues) { // posts transactions async function postTransactionForSchedule({ id }: { id: string }) { - let { data } = await aqlQuery(q('schedules').filter({ id }).select('*')); - let schedule = data[0]; + const { data } = await aqlQuery(q('schedules').filter({ id }).select('*')); + const schedule = data[0]; if (schedule == null || schedule._account == null) { return; } - let transaction = { + const transaction = { payee: schedule._payee, account: schedule._account, amount: getScheduledAmount(schedule._amount), @@ -460,21 +460,23 @@ async function postTransactionForSchedule({ id }: { id: string }) { async function advanceSchedulesService(syncSuccess) { // Move all paid schedules - let { data: schedules } = await aqlQuery( + const { data: schedules } = await aqlQuery( q('schedules') .filter({ completed: false, '_account.closed': false }) .select('*'), ); - let { data: hasTransData } = await aqlQuery( + const { data: hasTransData } = await aqlQuery( getHasTransactionsQuery(schedules), ); - let hasTrans = new Set(hasTransData.filter(Boolean).map(row => row.schedule)); + const hasTrans = new Set( + hasTransData.filter(Boolean).map(row => row.schedule), + ); - let failedToPost = []; + const failedToPost = []; let didPost = false; - for (let schedule of schedules) { - let status = getStatus( + for (const schedule of schedules) { + const status = getStatus( schedule.next_date, schedule.completed, hasTrans.has(schedule.id), @@ -531,7 +533,7 @@ async function advanceSchedulesService(syncSuccess) { } // Expose functions to the client -let app = createApp(); +const app = createApp(); 
app.method('schedule/create', mutator(undoable(createSchedule))); app.method('schedule/update', mutator(undoable(updateSchedule))); @@ -551,11 +553,11 @@ app.method('schedule/get-upcoming-dates', getUpcomingDates); app.service(trackJSONPaths); app.events.on('sync', ({ type, subtype }) => { - let completeEvent = + const completeEvent = type === 'success' || type === 'error' || type === 'unauthorized'; if (completeEvent && prefs.getPrefs()) { - let { lastScheduleRun } = prefs.getPrefs(); + const { lastScheduleRun } = prefs.getPrefs(); if (lastScheduleRun !== currentDay()) { runMutator(() => advanceSchedulesService(type === 'success')); diff --git a/packages/loot-core/src/server/schedules/find-schedules.ts b/packages/loot-core/src/server/schedules/find-schedules.ts index b2014306bbe..6ac6e7999e7 100644 --- a/packages/loot-core/src/server/schedules/find-schedules.ts +++ b/packages/loot-core/src/server/schedules/find-schedules.ts @@ -15,7 +15,7 @@ import { Schedule as RSchedule } from '../util/rschedule'; import { SchedulesHandlers } from './types/handlers'; function takeDates(config) { - let schedule = new RSchedule({ rrules: recurConfigToRSchedule(config) }); + const schedule = new RSchedule({ rrules: recurConfigToRSchedule(config) }); return schedule .occurrences({ take: 3 }) .toArray() @@ -23,7 +23,7 @@ function takeDates(config) { } async function getTransactions(date, account) { - let { data } = await aqlQuery( + const { data } = await aqlQuery( q('transactions') .filter({ account, @@ -42,7 +42,9 @@ async function getTransactions(date, account) { } function getRank(day1, day2) { - let dayDiff = Math.abs(d.differenceInDays(parseDate(day1), parseDate(day2))); + const dayDiff = Math.abs( + d.differenceInDays(parseDate(day1), parseDate(day2)), + ); // The amount of days off determines the rank: exact same day // is highest rank 1, 1 day off is .5, etc. 
This will find the @@ -52,15 +54,15 @@ function getRank(day1, day2) { function matchSchedules(allOccurs, config, partialMatchRank = 0.5) { allOccurs = [...allOccurs].reverse(); - let baseOccur = allOccurs[0]; - let occurs = allOccurs.slice(1); - let schedules = []; + const baseOccur = allOccurs[0]; + const occurs = allOccurs.slice(1); + const schedules = []; - for (let trans of baseOccur.transactions) { - let threshold = getApproxNumberThreshold(trans.amount); - let payee = trans.payee; + for (const trans of baseOccur.transactions) { + const threshold = getApproxNumberThreshold(trans.amount); + const payee = trans.payee; - let found = occurs.map(occur => { + const found = occurs.map(occur => { let matched = occur.transactions.find( t => t.amount >= trans.amount - threshold && @@ -78,12 +80,12 @@ function matchSchedules(allOccurs, config, partialMatchRank = 0.5) { continue; } - let rank = found.reduce( + const rank = found.reduce( (total, match) => total + match.rank, getRank(baseOccur.date, trans.date), ); - let exactAmount = found.reduce( + const exactAmount = found.reduce( (exact, match) => exact && match.trans.amount === trans.amount, true, ); @@ -114,7 +116,7 @@ async function schedulesForPattern( let schedules = []; for (let i = 0; i < numDays; i++) { - let start = d.addDays(baseStart, i); + const start = d.addDays(baseStart, i); let config; if (typeof baseConfig === 'function') { config = baseConfig(start); @@ -130,9 +132,9 @@ async function schedulesForPattern( // Our recur config expects a day string, not a native date format config.start = dayFromDate(config.start); - let data = []; - let dates = takeDates(config); - for (let date of dates) { + const data = []; + const dates = takeDates(config); + for (const date of dates) { data.push({ date: dayFromDate(date), transactions: await getTransactions(date, accountId), @@ -188,7 +190,7 @@ async function monthly(startDate, accountId) { async function monthlyLastDay(startDate, accountId) { // We do two separate calls because this pattern doesn't fit into // how `schedulesForPattern` works - let s1 = await schedulesForPattern( + const s1 = await schedulesForPattern( d.subMonths(parseDate(startDate), 3), 1, { frequency: 'monthly', patterns: [{ type: 'day', value: -1 }] }, @@ -198,7 +200,7 @@ async function monthlyLastDay(startDate, accountId) { 0.75, ); - let s2 = await schedulesForPattern( + const s2 = await schedulesForPattern( d.subMonths(parseDate(startDate), 4), 1, { frequency: 'monthly', patterns: [{ type: 'day', value: -1 }] }, @@ -214,8 +216,8 @@ async function monthly1stor3rd(startDate, accountId) { d.subWeeks(parseDate(startDate), 8), 14, start => { - let day = d.format(new Date(), 'iiii'); - let dayValue = day.slice(0, 2).toUpperCase(); + const day = d.format(new Date(), 'iiii'); + const dayValue = day.slice(0, 2).toUpperCase(); return { start, @@ -235,8 +237,8 @@ async function monthly2ndor4th(startDate, accountId) { d.subMonths(parseDate(startDate), 8), 14, start => { - let day = d.format(new Date(), 'iiii'); - let dayValue = day.slice(0, 2).toUpperCase(); + const day = d.format(new Date(), 'iiii'); + const dayValue = day.slice(0, 2).toUpperCase(); return { start, @@ -252,12 +254,12 @@ async function monthly2ndor4th(startDate, accountId) { } async function findStartDate(schedule) { - let conditions = schedule._conditions; - let dateCond = conditions.find(c => c.field === 'date'); + const conditions = schedule._conditions; + const dateCond = conditions.find(c => c.field === 'date'); let currentConfig = dateCond.value; while (1) 
{ - let prevConfig = currentConfig; + const prevConfig = currentConfig; currentConfig = { ...prevConfig }; switch (currentConfig.frequency) { @@ -290,11 +292,11 @@ async function findStartDate(schedule) { throw new Error('findStartDate: invalid frequency'); } - let newConditions = conditions.map(c => + const newConditions = conditions.map(c => c.field === 'date' ? { ...c, value: currentConfig } : c, ); - let { filters, errors } = conditionsToAQL(newConditions, { + const { filters, errors } = conditionsToAQL(newConditions, { recurDateBounds: 1, }); if (errors.length > 0) { @@ -304,7 +306,7 @@ async function findStartDate(schedule) { break; } - let { data } = await aqlQuery( + const { data } = await aqlQuery( q('transactions').filter({ $and: filters }).select('*'), ); @@ -338,21 +340,21 @@ export async function findSchedules() { // Search for them approx (+- 2 days) but track which transactions // and find the best one... - let { data: accounts } = await aqlQuery( + const { data: accounts } = await aqlQuery( q('accounts').filter({ closed: false }).select('*'), ); let allSchedules = []; - for (let account of accounts) { + for (const account of accounts) { // Find latest transaction-ish to start with - let latestTrans = await db.first( + const latestTrans = await db.first( 'SELECT * FROM v_transactions WHERE account = ? AND parent_id IS NULL ORDER BY date DESC LIMIT 1', [account.id], ); if (latestTrans) { - let latestDate = fromDateRepr(latestTrans.date); + const latestDate = fromDateRepr(latestTrans.date); allSchedules = allSchedules.concat( await weekly(latestDate, account.id), await every2weeks(latestDate, account.id), @@ -364,10 +366,10 @@ export async function findSchedules() { } } - let schedules = [...groupBy(allSchedules, 'payee').entries()].map( + const schedules = [...groupBy(allSchedules, 'payee').entries()].map( ([payeeId, schedules]) => { schedules.sort((s1, s2) => s2.rank - s1.rank); - let winner = schedules[0]; + const winner = schedules[0]; // Convert to schedule and return it return { @@ -396,7 +398,7 @@ export async function findSchedules() { const finalized: Awaited> = []; - for (let schedule of schedules) { + for (const schedule of schedules) { finalized.push(await findStartDate(schedule)); } return finalized; diff --git a/packages/loot-core/src/server/server-config.ts b/packages/loot-core/src/server/server-config.ts index ab0e72096dc..cde33082465 100644 --- a/packages/loot-core/src/server/server-config.ts +++ b/packages/loot-core/src/server/server-config.ts @@ -11,7 +11,7 @@ type ServerConfig = { let config: ServerConfig | null = null; function joinURL(base: string | URL, ...paths: string[]): string { - let url = new URL(base); + const url = new URL(base); url.pathname = fs.join(...paths); return url.toString(); } diff --git a/packages/loot-core/src/server/sheet.test.ts b/packages/loot-core/src/server/sheet.test.ts index 82c8861d1a4..c497db0ecb8 100644 --- a/packages/loot-core/src/server/sheet.test.ts +++ b/packages/loot-core/src/server/sheet.test.ts @@ -41,7 +41,7 @@ async function insertTransactions() { describe('Spreadsheet', () => { test('transferring a category triggers an update', async () => { - let spreadsheet = await sheet.loadSpreadsheet(db); + const spreadsheet = await sheet.loadSpreadsheet(db); await insertTransactions(); spreadsheet.startTransaction(); @@ -69,7 +69,7 @@ describe('Spreadsheet', () => { }); test('updating still works after transferring categories', async () => { - let spreadsheet = await sheet.loadSpreadsheet(db); + const spreadsheet = await 
sheet.loadSpreadsheet(db); await insertTransactions(); await db.deleteCategory({ id: 'cat1' }, 'cat2'); diff --git a/packages/loot-core/src/server/sheet.ts b/packages/loot-core/src/server/sheet.ts index 0420f27dc2e..792b176a431 100644 --- a/packages/loot-core/src/server/sheet.ts +++ b/packages/loot-core/src/server/sheet.ts @@ -41,7 +41,7 @@ function setCacheStatus( ) { if (clean) { // Generate random number and stick in both places - let num = Math.random() * 10000000; + const num = Math.random() * 10000000; sqlite.runQuery( cacheDb, 'INSERT OR REPLACE INTO kvcache_key (id, key) VALUES (1, ?)', @@ -67,14 +67,14 @@ function isCacheDirty(mainDb: Database, cacheDb: Database): boolean { [], true, ); - let num = rows.length === 0 ? null : rows[0].key; + const num = rows.length === 0 ? null : rows[0].key; if (num == null) { return true; } if (mainDb) { - let rows = sqlite.runQuery<{ key?: number }>( + const rows = sqlite.runQuery<{ key?: number }>( mainDb, 'SELECT key FROM kvcache_key WHERE id = 1', [], @@ -95,8 +95,8 @@ export async function loadSpreadsheet( db, onSheetChange?, ): Promise { - let cacheEnabled = process.env.NODE_ENV !== 'test'; - let mainDb = db.getDatabase(); + const cacheEnabled = process.env.NODE_ENV !== 'test'; + const mainDb = db.getDatabase(); let cacheDb; if (Platform.isDesktop && cacheEnabled) { @@ -104,7 +104,9 @@ export async function loadSpreadsheet( // much more likely to directly work with files on desktop, and this makes // it a lot clearer what the true filesize of the main db is (and avoid // copying the cache data around). - let cachePath = db.getDatabasePath().replace(/db\.sqlite$/, 'cache.sqlite'); + const cachePath = db + .getDatabasePath() + .replace(/db\.sqlite$/, 'cache.sqlite'); globalCacheDb = cacheDb = sqlite.openDatabase(cachePath); sqlite.execQuery( @@ -142,7 +144,7 @@ export async function loadSpreadsheet( } if (cacheEnabled && !isCacheDirty(mainDb, cacheDb)) { - let cachedRows = await sqlite.runQuery<{ key?: number; value: string }>( + const cachedRows = await sqlite.runQuery<{ key?: number; value: string }>( cacheDb, 'SELECT * FROM kvcache', [], @@ -150,8 +152,8 @@ export async function loadSpreadsheet( ); console.log(`Loaded spreadsheet from cache (${cachedRows.length} items)`); - for (let row of cachedRows) { - let parsed = JSON.parse(row.value); + for (const row of cachedRows) { + const parsed = JSON.parse(row.value); sheet.load(row.key, parsed); } } else { @@ -188,15 +190,15 @@ export async function reloadSpreadsheet(db): Promise { } export async function loadUserBudgets(db): Promise { - let sheet = globalSheet; + const sheet = globalSheet; // TODO: Clear out the cache here so make sure future loads of the app // don't load any extra values that aren't set here - let { budgetType } = prefs.getPrefs() || {}; + const { budgetType } = prefs.getPrefs() || {}; - let table = budgetType === 'report' ? 'reflect_budgets' : 'zero_budgets'; - let budgets = await db.all(` + const table = budgetType === 'report' ? 
'reflect_budgets' : 'zero_budgets'; + const budgets = await db.all(` SELECT * FROM ${table} b LEFT JOIN categories c ON c.id = b.category WHERE c.tombstone = 0 @@ -205,9 +207,9 @@ export async function loadUserBudgets(db): Promise { sheet.startTransaction(); // Load all the budget amounts and carryover values - for (let budget of budgets) { + for (const budget of budgets) { if (budget.month && budget.category) { - let sheetName = `budget${budget.month}`; + const sheetName = `budget${budget.month}`; sheet.set(`${sheetName}!budget-${budget.category}`, budget.amount); sheet.set( `${sheetName}!carryover-${budget.category}`, @@ -219,9 +221,9 @@ export async function loadUserBudgets(db): Promise { // For zero-based budgets, load the buffered amounts if (budgetType !== 'report') { - let budgetMonths = await db.all('SELECT * FROM zero_budget_months'); - for (let budgetMonth of budgetMonths) { - let sheetName = sheetForMonth(budgetMonth.id); + const budgetMonths = await db.all('SELECT * FROM zero_budget_months'); + for (const budgetMonth of budgetMonths) { + const sheetName = sheetForMonth(budgetMonth.id); sheet.set(`${sheetName}!buffered`, budgetMonth.buffered); } } diff --git a/packages/loot-core/src/server/spreadsheet/graph-data-structure.ts b/packages/loot-core/src/server/spreadsheet/graph-data-structure.ts index b4e4ee9a1a9..1ef929fee70 100644 --- a/packages/loot-core/src/server/spreadsheet/graph-data-structure.ts +++ b/packages/loot-core/src/server/spreadsheet/graph-data-structure.ts @@ -1,5 +1,5 @@ function Graph() { - let graph = { + const graph = { addNode, removeNode, adjacent, @@ -12,8 +12,8 @@ function Graph() { getEdges, }; - let edges = new Map(); - let incomingEdges = new Map(); + const edges = new Map(); + const incomingEdges = new Map(); function getEdges() { return { edges, incomingEdges }; @@ -29,7 +29,7 @@ function Graph() { const incoming = adjacentIncoming(node); incomingEdges.set(node, new Set()); - let iter = incoming.values(); + const iter = incoming.values(); let cur = iter.next(); while (!cur.done) { removeEdge(cur.value, node); @@ -78,7 +78,7 @@ function Graph() { function topologicalSortUntil(name, visited, sorted) { visited.add(name); - let iter = adjacent(name).values(); + const iter = adjacent(name).values(); let cur = iter.next(); while (!cur.done) { if (!visited.has(cur.value)) { @@ -104,7 +104,7 @@ function Graph() { } function generateDOT() { - let edgeStrings = []; + const edgeStrings = []; edges.forEach(function (adj, edge) { if (adj.length !== 0) { edgeStrings.push(`${edge} -> {${adj.join(',')}}`); diff --git a/packages/loot-core/src/server/spreadsheet/spreadsheet.test.ts b/packages/loot-core/src/server/spreadsheet/spreadsheet.test.ts index 381444eb0c8..f9cf6c970d6 100644 --- a/packages/loot-core/src/server/spreadsheet/spreadsheet.test.ts +++ b/packages/loot-core/src/server/spreadsheet/spreadsheet.test.ts @@ -127,7 +127,7 @@ describe('Spreadsheet', () => { test('querying deep join works', async () => { const spreadsheet = new Spreadsheet(db); await db.insertPayee({ name: '', transfer_acct: '1' }); - let payeeId2 = await db.insertPayee({ name: '', transfer_acct: '2' }); + const payeeId2 = await db.insertPayee({ name: '', transfer_acct: '2' }); await insertTransactions(payeeId2); spreadsheet.set( diff --git a/packages/loot-core/src/server/spreadsheet/spreadsheet.ts b/packages/loot-core/src/server/spreadsheet/spreadsheet.ts index 22b66067ede..bdb57348057 100644 --- a/packages/loot-core/src/server/spreadsheet/spreadsheet.ts +++ 
b/packages/loot-core/src/server/spreadsheet/spreadsheet.ts @@ -142,7 +142,7 @@ export default class Spreadsheet { this.running = true; while (idx < this.computeQueue.length) { - let name = this.computeQueue[idx]; + const name = this.computeQueue[idx]; let node; let result; @@ -150,7 +150,7 @@ export default class Spreadsheet { node = this.getNode(name); if (node._run) { - let args = node._dependencies.map(dep => { + const args = node._dependencies.map(dep => { return this.getNode(dep).value; }); @@ -240,7 +240,7 @@ export default class Spreadsheet { endCacheBarrier() { this.cacheBarrier = false; - let pendingChange = this.running || this.computeQueue.length > 0; + const pendingChange = this.running || this.computeQueue.length > 0; if (!pendingChange) { this.markCacheSafe(); } @@ -264,7 +264,7 @@ export default class Spreadsheet { return () => {}; } - let remove = this.addEventListener('change', (...args) => { + const remove = this.addEventListener('change', (...args) => { remove(); return func(...args); }); @@ -292,7 +292,7 @@ export default class Spreadsheet { } getCellValueLoose(sheetName, cellName) { - let name = resolveName(sheetName, cellName); + const name = resolveName(sheetName, cellName); if (this.nodes.has(name)) { return this.getNode(name).value; } @@ -338,12 +338,16 @@ export default class Spreadsheet { } createQuery(sheetName: string, cellName: string, query: string): void { - let name = resolveName(sheetName, cellName); - let node = this._getNode(name); + const name = resolveName(sheetName, cellName); + const node = this._getNode(name); if (node.query !== query) { node.query = query; - let { sqlPieces, state } = compileQuery(node.query, schema, schemaConfig); + const { sqlPieces, state } = compileQuery( + node.query, + schema, + schemaConfig, + ); node.sql = { sqlPieces, state }; this.transaction(() => { @@ -357,8 +361,8 @@ export default class Spreadsheet { cellName: string, initialValue: number | boolean, ): void { - let name = resolveName(sheetName, cellName); - let exists = this.nodes.has(name); + const name = resolveName(sheetName, cellName); + const exists = this.nodes.has(name); if (!exists) { this.create(name, initialValue); } @@ -379,8 +383,8 @@ export default class Spreadsheet { refresh?: boolean; }, ): void { - let name = resolveName(sheetName, cellName); - let node = this._getNode(name); + const name = resolveName(sheetName, cellName); + const node = this._getNode(name); if (node.dynamic) { // If it already exists, do nothing @@ -418,7 +422,7 @@ export default class Spreadsheet { } clearSheet(sheetName: string): void { - for (let [name, node] of this.nodes.entries()) { + for (const [name, node] of this.nodes.entries()) { if (node.sheet === sheetName) { this.nodes.delete(name); } @@ -426,7 +430,7 @@ export default class Spreadsheet { } voidCell(sheetName: string, name: string, voidValue = null): void { - let node = this.getNode(resolveName(sheetName, name)); + const node = this.getNode(resolveName(sheetName, name)); node._run = null; node.dynamic = false; node.value = voidValue; @@ -438,7 +442,7 @@ export default class Spreadsheet { } addDependencies(sheetName: string, cellName: string, deps: string[]): void { - let name = resolveName(sheetName, cellName); + const name = resolveName(sheetName, cellName); deps = deps.map(dep => { if (!unresolveName(dep).sheet) { @@ -447,8 +451,8 @@ export default class Spreadsheet { return dep; }); - let node = this.getNode(name); - let newDeps = deps.filter( + const node = this.getNode(name); + const newDeps = deps.filter( dep => 
(node._dependencies || []).indexOf(dep) === -1, ); @@ -466,7 +470,7 @@ export default class Spreadsheet { cellName: string, deps: string[], ): void { - let name = resolveName(sheetName, cellName); + const name = resolveName(sheetName, cellName); deps = deps.map(dep => { if (!unresolveName(dep).sheet) { @@ -475,7 +479,7 @@ export default class Spreadsheet { return dep; }); - let node = this.getNode(name); + const node = this.getNode(name); node._dependencies = (node._dependencies || []).filter( dep => deps.indexOf(dep) === -1, @@ -492,7 +496,7 @@ export default class Spreadsheet { } triggerDatabaseChanges(oldValues, newValues) { - let tables = new Set([...oldValues.keys(), ...newValues.keys()]); + const tables = new Set([...oldValues.keys(), ...newValues.keys()]); this.startTransaction(); // TODO: Create an index of deps so we don't have to iterate diff --git a/packages/loot-core/src/server/spreadsheet/util.ts b/packages/loot-core/src/server/spreadsheet/util.ts index 001772ed343..9d59a8f4ecf 100644 --- a/packages/loot-core/src/server/spreadsheet/util.ts +++ b/packages/loot-core/src/server/spreadsheet/util.ts @@ -1,5 +1,5 @@ export function unresolveName(name) { - let idx = name.indexOf('!'); + const idx = name.indexOf('!'); if (idx !== -1) { return { sheet: name.slice(0, idx), diff --git a/packages/loot-core/src/server/sync/encoder.ts b/packages/loot-core/src/server/sync/encoder.ts index 0dd779750a1..45999aa79c1 100644 --- a/packages/loot-core/src/server/sync/encoder.ts +++ b/packages/loot-core/src/server/sync/encoder.ts @@ -23,23 +23,23 @@ export async function encode( since: Timestamp | string, messages: Message[], ): Promise { - let { encryptKeyId } = prefs.getPrefs(); - let requestPb = new SyncProtoBuf.SyncRequest(); + const { encryptKeyId } = prefs.getPrefs(); + const requestPb = new SyncProtoBuf.SyncRequest(); for (let i = 0; i < messages.length; i++) { - let msg = messages[i]; - let envelopePb = new SyncProtoBuf.MessageEnvelope(); + const msg = messages[i]; + const envelopePb = new SyncProtoBuf.MessageEnvelope(); envelopePb.setTimestamp(msg.timestamp.toString()); - let messagePb = new SyncProtoBuf.Message(); + const messagePb = new SyncProtoBuf.Message(); messagePb.setDataset(msg.dataset); messagePb.setRow(msg.row); messagePb.setColumn(msg.column); messagePb.setValue(msg.value as string); - let binaryMsg = messagePb.serializeBinary(); + const binaryMsg = messagePb.serializeBinary(); if (encryptKeyId) { - let encrypted = new SyncProtoBuf.EncryptedData(); + const encrypted = new SyncProtoBuf.EncryptedData(); let result; try { @@ -74,21 +74,21 @@ export async function encode( export async function decode( data: Uint8Array, ): Promise<{ messages: Message[]; merkle: { hash: number } }> { - let { encryptKeyId } = prefs.getPrefs(); + const { encryptKeyId } = prefs.getPrefs(); - let responsePb = SyncProtoBuf.SyncResponse.deserializeBinary(data); - let merkle = JSON.parse(responsePb.getMerkle()); - let list = responsePb.getMessagesList(); - let messages = []; + const responsePb = SyncProtoBuf.SyncResponse.deserializeBinary(data); + const merkle = JSON.parse(responsePb.getMerkle()); + const list = responsePb.getMessagesList(); + const messages = []; for (let i = 0; i < list.length; i++) { - let envelopePb = list[i]; - let timestamp = Timestamp.parse(envelopePb.getTimestamp()); - let encrypted = envelopePb.getIsencrypted(); + const envelopePb = list[i]; + const timestamp = Timestamp.parse(envelopePb.getTimestamp()); + const encrypted = envelopePb.getIsencrypted(); let msg; if (encrypted) { - 
let binary = SyncProtoBuf.EncryptedData.deserializeBinary( + const binary = SyncProtoBuf.EncryptedData.deserializeBinary( envelopePb.getContent() as Uint8Array, ); diff --git a/packages/loot-core/src/server/sync/index.ts b/packages/loot-core/src/server/sync/index.ts index 114c337906b..344ecfec2d0 100644 --- a/packages/loot-core/src/server/sync/index.ts +++ b/packages/loot-core/src/server/sync/index.ts @@ -32,12 +32,12 @@ export { default as makeTestMessage } from './make-test-message'; export { default as resetSync } from './reset'; export { default as repairSync } from './repair'; -let FULL_SYNC_DELAY = 1000; +const FULL_SYNC_DELAY = 1000; let SYNCING_MODE = 'enabled'; type SyncingMode = 'enabled' | 'offline' | 'disabled' | 'import'; export function setSyncingMode(mode: SyncingMode) { - let prevMode = SYNCING_MODE; + const prevMode = SYNCING_MODE; switch (mode) { case 'enabled': SYNCING_MODE = 'enabled'; @@ -73,7 +73,7 @@ export function checkSyncingMode(mode: SyncingMode): boolean { } function apply(msg: Message, prev?: boolean) { - let { dataset, row, column, value } = msg; + const { dataset, row, column, value } = msg; if (dataset === 'prefs') { // Do nothing, it doesn't exist in the db @@ -107,10 +107,10 @@ async function fetchAll(table, ids) { let results = []; // was 500, but that caused a stack overflow in Safari - let batchSize = 100; + const batchSize = 100; for (let i = 0; i < ids.length; i += batchSize) { - let partIds = ids.slice(i, i + batchSize); + const partIds = ids.slice(i, i + batchSize); let sql; let column = `${table}.id`; @@ -133,7 +133,7 @@ async function fetchAll(table, ids) { sql += partIds.map(id => `${column} = ?`).join(' OR '); try { - let rows = await db.runQuery(sql, partIds, true); + const rows = await db.runQuery(sql, partIds, true); results = results.concat(rows); } catch (error) { throw new SyncError('invalid-schema', { @@ -191,14 +191,14 @@ export function addSyncListener(func: SyncListener) { } async function compareMessages(messages: Message[]): Promise { - let newMessages = []; + const newMessages = []; for (let i = 0; i < messages.length; i++) { - let message = messages[i]; - let { dataset, row, column, timestamp } = message; - let timestampStr = timestamp.toString(); + const message = messages[i]; + const { dataset, row, column, timestamp } = message; + const timestampStr = timestamp.toString(); - let res = db.runQuery( + const res = db.runQuery( db.cache( 'SELECT timestamp FROM messages_crdt WHERE dataset = ? AND row = ? AND column = ? AND timestamp >= ?', ), @@ -227,8 +227,8 @@ async function compareMessages(messages: Message[]): Promise { function applyMessagesForImport(messages: Message[]): void { db.transaction(() => { for (let i = 0; i < messages.length; i++) { - let msg = messages[i]; - let { dataset } = msg; + const msg = messages[i]; + const { dataset } = msg; if (!msg.old) { try { @@ -267,8 +267,8 @@ export const applyMessages = sequential(async (messages: Message[]) => { } messages = [...messages].sort((m1, m2) => { - let t1 = m1.timestamp ? m1.timestamp.toString() : ''; - let t2 = m2.timestamp ? m2.timestamp.toString() : ''; + const t1 = m1.timestamp ? m1.timestamp.toString() : ''; + const t2 = m2.timestamp ? 
m2.timestamp.toString() : ''; if (t1 < t2) { return -1; } else if (t1 > t2) { @@ -277,7 +277,7 @@ export const applyMessages = sequential(async (messages: Message[]) => { return 0; }); - let idsPerTable = {}; + const idsPerTable = {}; messages.forEach(msg => { if (msg.dataset === 'prefs') { return; @@ -290,13 +290,13 @@ export const applyMessages = sequential(async (messages: Message[]) => { }); async function fetchData(): Promise { - let data = new Map(); + const data = new Map(); - for (let table of Object.keys(idsPerTable)) { + for (const table of Object.keys(idsPerTable)) { const rows = await fetchAll(table, idsPerTable[table]); for (let i = 0; i < rows.length; i++) { - let row = rows[i]; + const row = rows[i]; setIn(data, [table, row.id], row); } } @@ -304,8 +304,8 @@ export const applyMessages = sequential(async (messages: Message[]) => { return data; } - let prefsToSet: LocalPrefs = {}; - let oldData = await fetchData(); + const prefsToSet: LocalPrefs = {}; + const oldData = await fetchData(); undo.appendMessages(messages, oldData); @@ -332,7 +332,7 @@ export const applyMessages = sequential(async (messages: Message[]) => { // also avoid any side effects to in-memory objects, and apply them // after this succeeds. db.transaction(() => { - let added = new Set(); + const added = new Set(); for (const msg of messages) { const { dataset, row, column, timestamp, value } = msg; @@ -392,7 +392,7 @@ export const applyMessages = sequential(async (messages: Message[]) => { connection.send('prefs-updated'); } - let newData = await fetchData(); + const newData = await fetchData(); // In testing, sometimes the spreadsheet isn't loaded, and that's ok if (sheet.get()) { @@ -409,7 +409,7 @@ export const applyMessages = sequential(async (messages: Message[]) => { _syncListeners.forEach(func => func(oldData, newData)); - let tables = getTablesFromMessages(messages.filter(msg => !msg.old)); + const tables = getTablesFromMessages(messages.filter(msg => !msg.old)); app.events.emit('sync', { type: 'applied', tables, @@ -524,7 +524,7 @@ export function scheduleFullSync(): Promise< function getTablesFromMessages(messages: Message[]): string[] { return messages.reduce((acc, message) => { - let dataset = + const dataset = message.dataset === 'schedules_next_date' ? 
'schedules' : message.dataset; if (!acc.includes(dataset)) { @@ -541,7 +541,7 @@ function getTablesFromMessages(messages: Message[]): string[] { export async function initialFullSync(): Promise<{ error?: { message: string; reason: string; meta: unknown }; }> { - let result = await fullSync(); + const result = await fullSync(); if (isError(result)) { // Make sure to wait for anything in the spreadsheet to process await sheet.waitOnSpreadsheet(); @@ -610,7 +610,7 @@ export const fullSync = once(async function (): Promise< return { error: { message: e.message, reason: e.reason, meta: e.meta } }; } - let tables = getTablesFromMessages(messages); + const tables = getTablesFromMessages(messages); app.events.emit('sync', { type: 'success', @@ -625,7 +625,7 @@ async function _fullSync( count: number, prevDiffTime: number, ): Promise { - let { cloudFileId, groupId, lastSyncedTimestamp } = prefs.getPrefs() || {}; + const { cloudFileId, groupId, lastSyncedTimestamp } = prefs.getPrefs() || {}; clearFullSyncTimeout(); @@ -634,17 +634,17 @@ async function _fullSync( } // Snapshot the point at which we are currently syncing - let currentTime = getClock().timestamp.toString(); + const currentTime = getClock().timestamp.toString(); - let since = + const since = sinceTimestamp || lastSyncedTimestamp || // Default to 5 minutes ago new Timestamp(Date.now() - 5 * 60 * 1000, 0, '0').toString(); - let messages = getMessagesSince(since); + const messages = getMessagesSince(since); - let userToken = await asyncStorage.getItem('user-token'); + const userToken = await asyncStorage.getItem('user-token'); logger.info( 'Syncing since', @@ -653,15 +653,19 @@ async function _fullSync( '(attempt: ' + count + ')', ); - let buffer = await encoder.encode(groupId, cloudFileId, since, messages); + const buffer = await encoder.encode(groupId, cloudFileId, since, messages); // TODO: There a limit on how many messages we can send because of // the payload size. Right now it's at 20MB on the server. We should // check the worst case here and make multiple requests if it's // really large. - let resBuffer = await postBinary(getServer().SYNC_SERVER + '/sync', buffer, { - 'X-ACTUAL-TOKEN': userToken, - }); + const resBuffer = await postBinary( + getServer().SYNC_SERVER + '/sync', + buffer, + { + 'X-ACTUAL-TOKEN': userToken, + }, + ); // Abort if the file is either no longer loaded, the group id has // changed because of a sync reset @@ -669,11 +673,11 @@ async function _fullSync( return []; } - let res = await encoder.decode(resBuffer); + const res = await encoder.decode(resBuffer); logger.info('Got messages from server', res.messages.length); - let localTimeChanged = getClock().timestamp.toString() !== currentTime; + const localTimeChanged = getClock().timestamp.toString() !== currentTime; // Apply the new messages let receivedMessages: Message[] = []; @@ -686,7 +690,7 @@ async function _fullSync( ); } - let diffTime = merkle.diff(res.merkle, getClock().merkle); + const diffTime = merkle.diff(res.merkle, getClock().merkle); if (diffTime !== null) { // This is a bit wonky, but we loop until we are in sync with the @@ -709,7 +713,7 @@ async function _fullSync( logger.info('RECEIVED -------'); logger.info(JSON.stringify(res.messages)); - let rebuiltMerkle = rebuildMerkleHash(); + const rebuiltMerkle = rebuildMerkleHash(); console.log( count, @@ -740,11 +744,11 @@ async function _fullSync( if (rebuiltMerkle.trie.hash === res.merkle.hash) { // Rebuilding the merkle worked... but why? 
-        let clocks = await db.all('SELECT * FROM messages_clock');
+        const clocks = await db.all('SELECT * FROM messages_clock');
         if (clocks.length !== 1) {
           console.log('Bad number of clocks:', clocks.length);
         }
-        let hash = deserializeClock(clocks[0]).merkle.hash;
+        const hash = deserializeClock(clocks[0]).merkle.hash;
         console.log('Merkle hash in db:', hash);
       }
diff --git a/packages/loot-core/src/server/sync/make-test-message.ts b/packages/loot-core/src/server/sync/make-test-message.ts
index f686060fb1a..7c864775933 100644
--- a/packages/loot-core/src/server/sync/make-test-message.ts
+++ b/packages/loot-core/src/server/sync/make-test-message.ts
@@ -7,12 +7,12 @@ async function randomString() {
 }
 
 export default async function makeTestMessage(keyId) {
-  let messagePb = new SyncProtoBuf.Message();
+  const messagePb = new SyncProtoBuf.Message();
   messagePb.setDataset(await randomString());
   messagePb.setRow(await randomString());
   messagePb.setColumn(await randomString());
   messagePb.setValue(await randomString());
-  let binaryMsg = messagePb.serializeBinary();
+  const binaryMsg = messagePb.serializeBinary();
 
   return await encryption.encrypt(binaryMsg, keyId);
 }
diff --git a/packages/loot-core/src/server/sync/migrate.test.ts b/packages/loot-core/src/server/sync/migrate.test.ts
index 005d68d8e1f..9bb54f703d5 100644
--- a/packages/loot-core/src/server/sync/migrate.test.ts
+++ b/packages/loot-core/src/server/sync/migrate.test.ts
@@ -19,21 +19,21 @@ afterEach(() => {
   unlisten();
 });
 
-let tableSchema = schema.transactions;
-let fields = Object.keys(tableSchema);
+const tableSchema = schema.transactions;
+const fields = Object.keys(tableSchema);
 
 function toInternalField(publicField) {
   return schemaConfig.views.transactions.fields[publicField];
 }
 
-let messageArb: fc.Arbitrary = fc
+const messageArb: fc.Arbitrary = fc
   .oneof(...fields.filter(f => f !== 'id').map(field => fc.constant(field)))
   .chain(field => {
-    let value = arbs
+    const value = arbs
       .typeArbitrary(tableSchema[field])
       .map(v => convertInputType(v, tableSchema[field].type));
 
-    let timestamp = fc
+    const timestamp = fc
       .date({
         min: new Date('2020-01-01T00:00:00.000Z'),
         max: new Date('2020-05-01T00:00:00.000Z'),
@@ -59,11 +59,11 @@ let messageArb: fc.Arbitrary = fc
 
 describe('sync migrations', () => {
   it('should set the parent_id', async () => {
-    let tracer = execTracer();
+    const tracer = execTracer();
     tracer.start();
 
-    let cleanup = addSyncListener((oldValues, newValues) => {
-      let transactionsMap = newValues.get('transactions') as Map<
+    const cleanup = addSyncListener((oldValues, newValues) => {
+      const transactionsMap = newValues.get('transactions') as Map<
         string,
         unknown
       >;
@@ -78,7 +78,7 @@ describe('sync migrations', () => {
     tracer.expectNow('applied', ['trans1/child1']);
     await tracer.expectWait('applied', ['trans1/child1']);
 
-    let transactions = db.runQuery('SELECT * FROM transactions', [], true);
+    const transactions = db.runQuery('SELECT * FROM transactions', [], true);
     expect(transactions.length).toBe(1);
     expect(transactions[0].parent_id).toBe('trans1');
 
@@ -90,10 +90,10 @@ describe('sync migrations', () => {
     await fc.assert(
       fc
         .asyncProperty(fc.array(messageArb, { maxLength: 100 }), async msgs => {
-          let tracer = execTracer();
+          const tracer = execTracer();
           tracer.start();
-          let cleanup = addSyncListener((oldValues, newValues) => {
-            let ts = newValues.get('transactions') as Map<
+          const cleanup = addSyncListener((oldValues, newValues) => {
+            const ts = newValues.get('transactions') as Map<
               string,
              { isChild: number; parent_id: string
| null; id: string } >; @@ -112,13 +112,13 @@ describe('sync migrations', () => { await sendMessages(msgs); await tracer.expect('applied'); - let transactions = await db.all('SELECT * FROM transactions', []); - for (let trans of transactions) { - let transMsgs = msgs + const transactions = await db.all('SELECT * FROM transactions', []); + for (const trans of transactions) { + const transMsgs = msgs .filter(msg => msg.row === trans.id) .sort((m1, m2) => { - let t1 = m1.timestamp.toString(); - let t2 = m2.timestamp.toString(); + const t1 = m1.timestamp.toString(); + const t2 = m2.timestamp.toString(); if (t1 < t2) { return 1; } else if (t1 > t2) { @@ -126,7 +126,7 @@ describe('sync migrations', () => { } return 0; }); - let msg = transMsgs.find(m => m.column === 'parent_id'); + const msg = transMsgs.find(m => m.column === 'parent_id'); if ( trans.isChild === 1 && @@ -136,7 +136,7 @@ describe('sync migrations', () => { // This is a child transaction didn't have a `parent_id` // set in the messages. It should have gotten set from // the `id` - let [parentId] = trans.id.split('/'); + const [parentId] = trans.id.split('/'); expect(parentId).not.toBe(null); expect(trans.parent_id).toBe(parentId); } else if (msg) { diff --git a/packages/loot-core/src/server/sync/migrate.ts b/packages/loot-core/src/server/sync/migrate.ts index dbc0fd951ef..8bfceabf475 100644 --- a/packages/loot-core/src/server/sync/migrate.ts +++ b/packages/loot-core/src/server/sync/migrate.ts @@ -5,7 +5,7 @@ import { Message, addSyncListener, applyMessages } from './index'; function migrateParentIds(_oldValues, newValues) { newValues.forEach((items, table) => { if (table === 'transactions') { - let toApply: Message[] = []; + const toApply: Message[] = []; items.forEach(newValue => { if ( @@ -13,7 +13,7 @@ function migrateParentIds(_oldValues, newValues) { newValue.parent_id == null && newValue.id.includes('/') ) { - let parentId = newValue.id.split('/')[0]; + const parentId = newValue.id.split('/')[0]; toApply.push({ dataset: 'transactions', diff --git a/packages/loot-core/src/server/sync/repair.ts b/packages/loot-core/src/server/sync/repair.ts index fdb7c4cc320..1ec8fba70b2 100644 --- a/packages/loot-core/src/server/sync/repair.ts +++ b/packages/loot-core/src/server/sync/repair.ts @@ -6,7 +6,7 @@ export function rebuildMerkleHash(): { numMessages: number; trie: merkle.TrieNode; } { - let rows: { timestamp: string }[] = db.runQuery( + const rows: { timestamp: string }[] = db.runQuery( 'SELECT timestamp FROM messages_crdt', [], true, @@ -24,8 +24,8 @@ export function rebuildMerkleHash(): { } export default async function repairSync(): Promise { - let rebuilt = rebuildMerkleHash(); - let clock = getClock(); + const rebuilt = rebuildMerkleHash(); + const clock = getClock(); // Save it locally clock.merkle = rebuilt.trie; diff --git a/packages/loot-core/src/server/sync/reset.ts b/packages/loot-core/src/server/sync/reset.ts index ae8627757f4..8a88c2abcea 100644 --- a/packages/loot-core/src/server/sync/reset.ts +++ b/packages/loot-core/src/server/sync/reset.ts @@ -13,7 +13,7 @@ export default async function resetSync( // If we aren't resetting the key, make sure our key is up-to-date // so we don't accidentally upload a file encrypted with the wrong // key (or not encrypted at all) - let { valid, error } = await cloudStorage.checkKey(); + const { valid, error } = await cloudStorage.checkKey(); if (error) { return { error }; } else if (!valid) { @@ -21,7 +21,7 @@ export default async function resetSync( } } - let { error } = await 
cloudStorage.resetSyncState(keyState); + const { error } = await cloudStorage.resetSyncState(keyState); if (error) { return { error }; } @@ -55,14 +55,16 @@ export default async function resetSync( }); if (keyState) { - let { key } = keyState; - let { cloudFileId } = prefs.getPrefs(); + const { key } = keyState; + const { cloudFileId } = prefs.getPrefs(); // The key has changed, we need to update our local data to // store the new key // Persist key in async storage - let keys = JSON.parse((await asyncStorage.getItem(`encrypt-keys`)) || '{}'); + const keys = JSON.parse( + (await asyncStorage.getItem(`encrypt-keys`)) || '{}', + ); keys[cloudFileId] = key.serialize(); await asyncStorage.setItem('encrypt-keys', JSON.stringify(keys)); diff --git a/packages/loot-core/src/server/sync/sync.property.test.ts b/packages/loot-core/src/server/sync/sync.property.test.ts index 94ad71c2ac2..9532405d486 100644 --- a/packages/loot-core/src/server/sync/sync.property.test.ts +++ b/packages/loot-core/src/server/sync/sync.property.test.ts @@ -27,7 +27,7 @@ afterEach(() => { global.resetTime(); }); -let schema = { +const schema = { spreadsheet_cells: { expr: 'text', }, @@ -89,9 +89,9 @@ let schema = { }; // The base time is 2019-08-09T18:14:31.903Z -let baseTime = 1565374471903; -let clientId1 = '80dd7da215247293'; -let clientId2 = '90xU1sd5124329ac'; +const baseTime = 1565374471903; +const clientId1 = '80dd7da215247293'; +const clientId2 = '90xU1sd5124329ac'; function makeGen({ table, @@ -127,7 +127,7 @@ function makeGen({ }); } -let generators = []; +const generators = []; Object.keys(schema).forEach(table => { Object.keys(schema[table]).reduce((obj, field) => { if (table === 'spreadsheet_cells' && field === 'expr') { @@ -145,7 +145,7 @@ Object.keys(schema).forEach(table => { return obj; } - let type = schema[table][field]; + const type = schema[table][field]; switch (type) { case 'text': generators.push(makeGen({ table, field, value: jsc.asciinestring })); @@ -173,11 +173,11 @@ Object.keys(schema).forEach(table => { }); function shuffle(arr) { - let src = [...arr]; - let shuffled = new Array(src.length); + const src = [...arr]; + const shuffled = new Array(src.length); let item; while ((item = src.pop())) { - let idx = Math.floor(Math.random() * shuffled.length); + const idx = Math.floor(Math.random() * shuffled.length); if (shuffled[idx]) { src.push(item); } else { @@ -188,7 +188,7 @@ function shuffle(arr) { } function divide(arr) { - let res = []; + const res = []; for (let i = 0; i < arr.length; i += 10) { res.push(arr.slice(i, i + 10)); } @@ -199,11 +199,11 @@ async function run(msgs) { mockSyncServer.reset(); // Do some post-processing of the data - let knownTimestamps = new Set(); - let res = msgs.reduce( + const knownTimestamps = new Set(); + const res = msgs.reduce( (acc, msg) => { // Filter out duplicate timestamps - let ts = msg.timestamp.toString(); + const ts = msg.timestamp.toString(); if (knownTimestamps.has(ts)) { return acc; } @@ -240,9 +240,9 @@ async function run(msgs) { // server from another client, wait for all the `sendMessages` to // complete, then do another `fullSync`, and finally check the // merkle tree to see if there are any differences. 
-  let chunks = divide(res.firstMessages);
+  const chunks = divide(res.firstMessages);
 
-  let client1Sync = Promise.all(
+  const client1Sync = Promise.all(
     chunks.slice(0, -1).map(slice => sync.receiveMessages(slice)),
   );
   await client1Sync;
@@ -260,11 +260,11 @@ async function run(msgs) {
     ),
   );
 
-  let syncPromise = sync.fullSync();
+  const syncPromise = sync.fullSync();
 
   // Add in some more messages while the sync is running, this makes
   // sure that the loop works
-  let lastReceive = sync.receiveMessages(chunks[chunks.length - 1]);
+  const lastReceive = sync.receiveMessages(chunks[chunks.length - 1]);
 
   mockSyncServer.handlers['/sync/sync'](
     await encoder.encode(
@@ -279,13 +279,13 @@ async function run(msgs) {
     ),
   );
 
-  let result = await syncPromise;
+  const result = await syncPromise;
   if (isError(result)) {
     console.log(result.error);
     throw result.error;
   }
 
-  let serverMerkle = mockSyncServer.getClock().merkle;
+  const serverMerkle = mockSyncServer.getClock().merkle;
 
   // Double-check that the data is in sync
   let diff = merkle.diff(serverMerkle, getClock().merkle);
@@ -310,7 +310,7 @@ async function run(msgs) {
 
 describe('sync property test', () => {
   xit('should always sync clients into the same state', async () => {
-    let test = await jsc.check(
+    const test = await jsc.check(
       jsc.forall(
         jsc.tuple(Array.from(new Array(100)).map(() => jsc.oneof(generators))),
         async msgs => {
@@ -328,7 +328,7 @@ describe('sync property test', () => {
           }
 
           for (let i = 0; i < 10; i++) {
-            let shuffled = shuffle(msgs);
+            const shuffled = shuffle(msgs);
             r = await run(shuffled);
             if (r === false) {
               return false;
@@ -365,7 +365,7 @@ describe('sync property test', () => {
     // Copy and paste a counterexample that the property test finds
     // here. That way you can work on it separately and figure out
     // what's wrong.
-    let msgs = convert([
+    const msgs = convert([
       {
         dataset: 'accounts',
         row: 't',
@@ -376,7 +376,7 @@ describe('sync property test', () => {
       // ...
     ]);
 
-    let res = await run(msgs);
+    const res = await run(msgs);
     expect(res).toBe(true);
   });
 });
diff --git a/packages/loot-core/src/server/sync/sync.test.ts b/packages/loot-core/src/server/sync/sync.test.ts
index a0a8673c776..7ab2fdd7dc0 100644
--- a/packages/loot-core/src/server/sync/sync.test.ts
+++ b/packages/loot-core/src/server/sync/sync.test.ts
@@ -146,8 +146,8 @@ describe('Sync', () => {
 });
 
 async function registerBudgetMonths(months) {
-  let createdMonths = new Set();
-  for (let month of months) {
+  const createdMonths = new Set();
+  for (const month of months) {
     createdMonths.add(month);
   }
   sheet.get().meta().createdMonths = months;
@@ -170,12 +170,12 @@ async function asSecondClient(func) {
 }
 
 function expectCellToExist(sheetName, name) {
-  let value = sheet.get().getCellValueLoose(sheetName, name);
+  const value = sheet.get().getCellValueLoose(sheetName, name);
   expect(value).not.toBe(null);
 }
 
 function expectCellNotToExist(sheetName, name, voided?: boolean) {
-  let value = sheet.get().getCellValueLoose(sheetName, name);
+  const value = sheet.get().getCellValueLoose(sheetName, name);
   expect(value).toBe(voided ? 0 : null);
 }
 
@@ -240,7 +240,7 @@ describe('Sync projections', () => {
     registerBudgetMonths(['2017-01', '2017-02']);
 
    // Get all the messages.
We'll apply them in two passes - let messages = mockSyncServer.getMessages(); + const messages = mockSyncServer.getMessages(); // Apply all but the last message (which deletes the category) await applyMessages(messages.slice(0, -1)); @@ -291,10 +291,10 @@ describe('Sync projections', () => { registerBudgetMonths(['2017-01', '2017-02']); // Get all the messages. We'll apply them in two passes - let messages = mockSyncServer.getMessages(); + const messages = mockSyncServer.getMessages(); - let firstMessages = messages.filter(m => m.column !== 'tombstone'); - let secondMessages = messages.filter(m => m.column === 'tombstone'); + const firstMessages = messages.filter(m => m.column !== 'tombstone'); + const secondMessages = messages.filter(m => m.column === 'tombstone'); // Apply all the good messages await applyMessages(firstMessages); @@ -325,14 +325,14 @@ describe('Sync projections', () => { registerBudgetMonths(['2017-01', '2017-02']); // Get all the messages. We'll apply them in two passes - let messages = mockSyncServer.getMessages(); + const messages = mockSyncServer.getMessages(); - let firstMessages = messages.slice(0, -2); - let secondMessages = messages.slice(-2); + const firstMessages = messages.slice(0, -2); + const secondMessages = messages.slice(-2); // Apply all the good messages await applyMessages(firstMessages); - let [cat] = await db.getCategories(); + const [cat] = await db.getCategories(); expect(cat.cat_group).toBe('group1'); expectCellToExist('budget201701', 'group-sum-amount-' + groupId); diff --git a/packages/loot-core/src/server/tests/mockSyncServer.ts b/packages/loot-core/src/server/tests/mockSyncServer.ts index 09db3b76321..a1498219273 100644 --- a/packages/loot-core/src/server/tests/mockSyncServer.ts +++ b/packages/loot-core/src/server/tests/mockSyncServer.ts @@ -40,11 +40,11 @@ handlers['/'] = () => { }; handlers['/sync/sync'] = async (data: Uint8Array): Promise => { - let requestPb = SyncProtoBuf.SyncRequest.deserializeBinary(data); - let since = requestPb.getSince(); - let messages = requestPb.getMessagesList(); + const requestPb = SyncProtoBuf.SyncRequest.deserializeBinary(data); + const since = requestPb.getSince(); + const messages = requestPb.getMessagesList(); - let newMessages = currentMessages.filter(msg => msg.timestamp > since); + const newMessages = currentMessages.filter(msg => msg.timestamp > since); messages.forEach(msg => { if (!currentMessages.find(m => m.timestamp === msg.getTimestamp())) { @@ -63,11 +63,11 @@ handlers['/sync/sync'] = async (data: Uint8Array): Promise => { currentClock.merkle = merkle.prune(currentClock.merkle); - let responsePb = new SyncProtoBuf.SyncResponse(); + const responsePb = new SyncProtoBuf.SyncResponse(); responsePb.setMerkle(JSON.stringify(currentClock.merkle)); newMessages.forEach(msg => { - let envelopePb = new SyncProtoBuf.MessageEnvelope(); + const envelopePb = new SyncProtoBuf.MessageEnvelope(); envelopePb.setTimestamp(msg.timestamp); envelopePb.setIsencrypted(msg.is_encrypted); envelopePb.setContent(msg.content); @@ -106,7 +106,7 @@ handlers['/plaid/transactions'] = ({ }; export const filterMockData = func => { - let copied = JSON.parse(JSON.stringify(defaultMockData)); + const copied = JSON.parse(JSON.stringify(defaultMockData)); currentMockData = func(copied); }; @@ -122,8 +122,8 @@ export const getClock = (): Clock => { export const getMessages = (): Message[] => { return currentMessages.map(msg => { - let { timestamp, content } = msg; - let fields = SyncProtoBuf.Message.deserializeBinary(content); + const { 
timestamp, content } = msg; + const fields = SyncProtoBuf.Message.deserializeBinary(content); return { timestamp: Timestamp.parse(timestamp), diff --git a/packages/loot-core/src/server/tools/app.ts b/packages/loot-core/src/server/tools/app.ts index 867ad263809..e10e605ee3e 100644 --- a/packages/loot-core/src/server/tools/app.ts +++ b/packages/loot-core/src/server/tools/app.ts @@ -5,19 +5,19 @@ import { runMutator } from '../mutators'; import { ToolsHandlers } from './types/handlers'; -let app = createApp(); +const app = createApp(); app.method('tools/fix-split-transactions', async () => { // 1. Check for child transactions that have a blank payee, and set // the payee to whatever the parent has - let blankPayeeRows = await db.all(` + const blankPayeeRows = await db.all(` SELECT t.*, p.payee AS parentPayee FROM v_transactions_internal t LEFT JOIN v_transactions_internal p ON t.parent_id = p.id WHERE t.is_child = 1 AND t.payee IS NULL AND p.payee IS NOT NULL `); await runMutator(async () => { - let updated = blankPayeeRows.map(row => ({ + const updated = blankPayeeRows.map(row => ({ id: row.id, payee: row.parentPayee, })); @@ -26,14 +26,14 @@ app.method('tools/fix-split-transactions', async () => { // 2. Make sure the "cleared" flag is synced up with the parent // transactions - let clearedRows = await db.all(` + const clearedRows = await db.all(` SELECT t.id, p.cleared FROM v_transactions_internal t LEFT JOIN v_transactions_internal p ON t.parent_id = p.id WHERE t.is_child = 1 AND t.cleared != p.cleared `); await runMutator(async () => { - let updated = clearedRows.map(row => ({ + const updated = clearedRows.map(row => ({ id: row.id, cleared: row.cleared === 1, })); @@ -42,14 +42,14 @@ app.method('tools/fix-split-transactions', async () => { // 3. Mark the `tombstone` field as true on any child transactions // that have a dead parent - let deletedRows = await db.all(` + const deletedRows = await db.all(` SELECT t.* FROM v_transactions_internal t LEFT JOIN v_transactions_internal p ON t.parent_id = p.id WHERE t.is_child = 1 AND t.tombstone = 0 AND (p.tombstone = 1 OR p.id IS NULL) `); await runMutator(async () => { - let updated = deletedRows.map(row => ({ id: row.id, tombstone: 1 })); + const updated = deletedRows.map(row => ({ id: row.id, tombstone: 1 })); await batchUpdateTransactions({ updated }); }); diff --git a/packages/loot-core/src/server/undo.ts b/packages/loot-core/src/server/undo.ts index a53ca60db1b..faf5fa9278d 100644 --- a/packages/loot-core/src/server/undo.ts +++ b/packages/loot-core/src/server/undo.ts @@ -19,7 +19,7 @@ let MESSAGE_HISTORY: Array = [ { type: 'marker' }, ]; let CURSOR = 0; -let HISTORY_SIZE = 20; +const HISTORY_SIZE = 20; export type UndoState = { messages: Message[]; @@ -31,22 +31,22 @@ export type UndoState = { function trimHistory() { MESSAGE_HISTORY = MESSAGE_HISTORY.slice(0, CURSOR + 1); - let markers = MESSAGE_HISTORY.filter(item => item.type === 'marker'); + const markers = MESSAGE_HISTORY.filter(item => item.type === 'marker'); if (markers.length > HISTORY_SIZE) { - let slice = markers.slice(-HISTORY_SIZE); - let cutoff = MESSAGE_HISTORY.indexOf(slice[0]); + const slice = markers.slice(-HISTORY_SIZE); + const cutoff = MESSAGE_HISTORY.indexOf(slice[0]); MESSAGE_HISTORY = MESSAGE_HISTORY.slice(cutoff); CURSOR = MESSAGE_HISTORY.length - 1; } } export function appendMessages(messages, oldData) { - let context = getMutatorContext(); + const context = getMutatorContext(); if (context.undoListening && messages.length > 0) { trimHistory(); - let { undoTag } = 
context; + const { undoTag } = context; MESSAGE_HISTORY.push({ type: 'messages', @@ -67,14 +67,14 @@ export function withUndo( func: () => Promise, meta?: unknown, ): Promise { - let context = getMutatorContext(); + const context = getMutatorContext(); if (context.undoDisabled || context.undoListening) { return func(); } MESSAGE_HISTORY = MESSAGE_HISTORY.slice(0, CURSOR + 1); - let marker: MarkerMessage = { type: 'marker', meta }; + const marker: MarkerMessage = { type: 'marker', meta }; if (MESSAGE_HISTORY[MESSAGE_HISTORY.length - 1].type === 'marker') { MESSAGE_HISTORY[MESSAGE_HISTORY.length - 1] = marker; @@ -128,7 +128,7 @@ async function applyUndoAction(messages, meta, undoTag) { } export async function undo() { - let end = CURSOR; + const end = CURSOR; CURSOR = Math.max(CURSOR - 1, 0); // Walk back to the nearest marker @@ -136,14 +136,14 @@ export async function undo() { CURSOR--; } - let meta = MESSAGE_HISTORY[CURSOR].meta; - let start = Math.max(CURSOR, 0); - let entries = MESSAGE_HISTORY.slice(start, end + 1).filter( + const meta = MESSAGE_HISTORY[CURSOR].meta; + const start = Math.max(CURSOR, 0); + const entries = MESSAGE_HISTORY.slice(start, end + 1).filter( (entry): entry is MessagesMessage => entry.type === 'messages', ); if (entries.length > 0) { - let toApply = entries + const toApply = entries .reduce((acc, entry) => { return acc.concat( entry.messages @@ -158,7 +158,7 @@ export async function undo() { } function undoMessage(message, oldData) { - let oldItem = getIn(oldData, [message.dataset, message.row]); + const oldItem = getIn(oldData, [message.dataset, message.row]); if (oldItem) { let column = message.column; if (message.dataset === 'spreadsheet_cells') { @@ -203,12 +203,12 @@ function undoMessage(message, oldData) { } export async function redo() { - let meta = + const meta = MESSAGE_HISTORY[CURSOR].type === 'marker' ? 
MESSAGE_HISTORY[CURSOR].meta : null; - let start = CURSOR; + const start = CURSOR; CURSOR = Math.min(CURSOR + 1, MESSAGE_HISTORY.length - 1); // Walk forward to the nearest marker @@ -219,13 +219,13 @@ export async function redo() { CURSOR++; } - let end = CURSOR; - let entries = MESSAGE_HISTORY.slice(start + 1, end + 1).filter( + const end = CURSOR; + const entries = MESSAGE_HISTORY.slice(start + 1, end + 1).filter( (entry): entry is MessagesMessage => entry.type === 'messages', ); if (entries.length > 0) { - let toApply = entries.reduce((acc, entry) => { + const toApply = entries.reduce((acc, entry) => { return acc .concat(entry.messages) .concat(redoResurrections(entry.messages, entry.oldData)); @@ -236,12 +236,12 @@ export async function redo() { } function redoResurrections(messages, oldData): Message[] { - let resurrect = new Set(); + const resurrect = new Set(); messages.forEach(message => { // If any of the ids didn't exist before, we need to "resurrect" // them by resetting their tombstones to 0 - let oldItem = getIn(oldData, [message.dataset, message.row]); + const oldItem = getIn(oldData, [message.dataset, message.row]); if ( !oldItem && ![ @@ -258,7 +258,7 @@ function redoResurrections(messages, oldData): Message[] { }); return [...resurrect].map(desc => { - let [table, row] = desc.split('.'); + const [table, row] = desc.split('.'); return { dataset: table, row, diff --git a/packages/loot-core/src/server/update.ts b/packages/loot-core/src/server/update.ts index 221b4869fc3..fb58adb5b80 100644 --- a/packages/loot-core/src/server/update.ts +++ b/packages/loot-core/src/server/update.ts @@ -11,14 +11,14 @@ async function runMigrations() { } async function updateViews() { - let hashKey = 'view-hash'; - let row = await db.first('SELECT value FROM __meta__ WHERE key = ?', [ + const hashKey = 'view-hash'; + const row = await db.first('SELECT value FROM __meta__ WHERE key = ?', [ hashKey, ]); - let { value: hash } = row || {}; + const { value: hash } = row || {}; - let views = makeViews(schema, schemaConfig); - let currentHash = md5(views); + const views = makeViews(schema, schemaConfig); + const currentHash = md5(views); if (hash !== currentHash) { await db.execQuery(views); diff --git a/packages/loot-core/src/server/util/budget-name.ts b/packages/loot-core/src/server/util/budget-name.ts index b6e9bfe7833..2a1481ea293 100644 --- a/packages/loot-core/src/server/util/budget-name.ts +++ b/packages/loot-core/src/server/util/budget-name.ts @@ -3,7 +3,7 @@ import { v4 as uuidv4 } from 'uuid'; import * as fs from '../../platform/server/fs'; export async function uniqueFileName(existingFiles) { - let initialName = 'My Finances'; + const initialName = 'My Finances'; let idx = 1; // If there is a conflict, keep appending an index until there is no