From f74dc7c70e24c8614eee47fcdb22af4265a9b326 Mon Sep 17 00:00:00 2001
From: Chris Connelly
Date: Tue, 29 Jun 2021 13:12:08 +0100
Subject: [PATCH] fix: Use toml-patch to minimise Cargo.lock diffs

The @iarna/toml package stringifies TOML in a different format from the canonical `Cargo.lock` format. It also has no options to configure the formatting. The toml-patch package claims to preserve formatting in the original document, so let's see if that's the case...
---
 index.js | 18 +-
 node_modules/.yarn-integrity | 12 +-
 node_modules/@iarna/toml/CHANGELOG.md | 278 --
 node_modules/@iarna/toml/LICENSE | 14 -
 node_modules/@iarna/toml/README.md | 301 ---
 node_modules/@iarna/toml/index.d.ts | 57 -
 node_modules/@iarna/toml/lib/create-date.js | 23 -
 .../@iarna/toml/lib/create-datetime-float.js | 24 -
 .../@iarna/toml/lib/create-datetime.js | 10 -
 node_modules/@iarna/toml/lib/create-time.js | 22 -
 node_modules/@iarna/toml/lib/format-num.js | 6 -
 node_modules/@iarna/toml/lib/parser-debug.js | 60 -
 node_modules/@iarna/toml/lib/parser.js | 127 -
 node_modules/@iarna/toml/lib/toml-parser.js | 1379 ----------
 node_modules/@iarna/toml/package.json | 82 -
 node_modules/@iarna/toml/parse-async.js | 30 -
 .../@iarna/toml/parse-pretty-error.js | 33 -
 node_modules/@iarna/toml/parse-stream.js | 80 -
 node_modules/@iarna/toml/parse-string.js | 18 -
 node_modules/@iarna/toml/parse.js | 5 -
 node_modules/@iarna/toml/stringify.js | 296 ---
 node_modules/@iarna/toml/toml.js | 3 -
 node_modules/toml-patch/LICENSE | 21 +
 node_modules/toml-patch/README.md | 104 +
 .../toml-patch/dist/toml-patch.cjs.min.js | 2 +
 .../toml-patch/dist/toml-patch.cjs.min.js.map | 1 +
 node_modules/toml-patch/dist/toml-patch.d.ts | 18 +
 node_modules/toml-patch/dist/toml-patch.es.js | 2255 +++++++++++++++++
 .../toml-patch/dist/toml-patch.umd.min.js | 2 +
 .../toml-patch/dist/toml-patch.umd.min.js.map | 1 +
 node_modules/toml-patch/package.json | 61 +
 package.json | 4 +-
 yarn.lock | 10 +-
 33 files changed, 2487 insertions(+), 2870 deletions(-)
 delete mode 100755 node_modules/@iarna/toml/CHANGELOG.md
 delete mode 100755 node_modules/@iarna/toml/LICENSE
 delete mode 100755 node_modules/@iarna/toml/README.md
 delete mode 100755 node_modules/@iarna/toml/index.d.ts
 delete mode 100755 node_modules/@iarna/toml/lib/create-date.js
 delete mode 100755 node_modules/@iarna/toml/lib/create-datetime-float.js
 delete mode 100755 node_modules/@iarna/toml/lib/create-datetime.js
 delete mode 100755 node_modules/@iarna/toml/lib/create-time.js
 delete mode 100755 node_modules/@iarna/toml/lib/format-num.js
 delete mode 100755 node_modules/@iarna/toml/lib/parser-debug.js
 delete mode 100755 node_modules/@iarna/toml/lib/parser.js
 delete mode 100755 node_modules/@iarna/toml/lib/toml-parser.js
 delete mode 100755 node_modules/@iarna/toml/package.json
 delete mode 100755 node_modules/@iarna/toml/parse-async.js
 delete mode 100755 node_modules/@iarna/toml/parse-pretty-error.js
 delete mode 100755 node_modules/@iarna/toml/parse-stream.js
 delete mode 100755 node_modules/@iarna/toml/parse-string.js
 delete mode 100755 node_modules/@iarna/toml/parse.js
 delete mode 100755 node_modules/@iarna/toml/stringify.js
 delete mode 100755 node_modules/@iarna/toml/toml.js
 create mode 100644 node_modules/toml-patch/LICENSE
 create mode 100644 node_modules/toml-patch/README.md
 create mode 100644 node_modules/toml-patch/dist/toml-patch.cjs.min.js
 create mode 100644 node_modules/toml-patch/dist/toml-patch.cjs.min.js.map
 create mode 100644 node_modules/toml-patch/dist/toml-patch.d.ts
 create mode 100644
node_modules/toml-patch/dist/toml-patch.es.js create mode 100644 node_modules/toml-patch/dist/toml-patch.umd.min.js create mode 100644 node_modules/toml-patch/dist/toml-patch.umd.min.js.map create mode 100644 node_modules/toml-patch/package.json diff --git a/index.js b/index.js index 4ff5026..2f73f60 100644 --- a/index.js +++ b/index.js @@ -2,7 +2,7 @@ const core = require('@actions/core'); const exec = require('@actions/exec'); const github = require('@actions/github'); const standardVersion = require('standard-version'); -const toml = require('@iarna/toml') +const toml = require('toml-patch') const fs = require('fs'); const bump = async () => { @@ -64,14 +64,15 @@ const bump = async () => { core.debug(`Commit message added was: ${commit_message}`); // parse and update cargo.toml - const manifest = toml.parse(fs.readFileSync('Cargo.toml', 'utf8')); + const manifestToml = fs.readFileSync('Cargo.toml', 'utf8'); + const manifest = toml.parse(manifestToml); manifest.package.version = cargo_version; - fs.writeFileSync('Cargo.toml', toml.stringify(manifest)); + fs.writeFileSync('Cargo.toml', toml.patch(manifestToml, manifest)); // parse and update Cargo.lock (if present) - let lockfile; + let lockfileToml; try { - lockfile = toml.parse(fs.readFileSync('Cargo.lock', 'utf8')); + lockfileToml = fs.readFileSync('Cargo.lock', 'utf8'); } catch (error) { if (error.code === 'ENOENT') { core.debug('No Cargo.lock to update'); @@ -80,11 +81,12 @@ const bump = async () => { } } - if (lockfile != null) { + if (lockfileToml != null) { + const lockfile = toml.parse(lockfileToml); const crate = lockfile.package.find(p => p.name === manifest.package.name); if (crate != null) { crate.version = cargo_version; - fs.writeFileSync('Cargo.lock', toml.stringify(lockfile)); + fs.writeFileSync('Cargo.lock', toml.patch(lockfileToml, lockfile)); } else { core.warn(`Self crate (${manifest.package.name}) not present in lockfile packages`); } @@ -142,4 +144,4 @@ const bump = async () => { } -bump() \ No newline at end of file +bump() diff --git a/node_modules/.yarn-integrity b/node_modules/.yarn-integrity index 4f7a1f9..8ce919e 100644 --- a/node_modules/.yarn-integrity +++ b/node_modules/.yarn-integrity @@ -1,18 +1,16 @@ { - "systemParams": "darwin-x64-93", + "systemParams": "darwin-x64-72", "modulesFolders": [ "node_modules" ], "flags": [], - "linkedModules": [ - "sn_nodejs" - ], + "linkedModules": [], "topLevelPatterns": [ "@actions/core@^1.4.0", "@actions/exec@^1.1.0", "@actions/github@^5.0.0", - "@iarna/toml@^2.2.5", - "standard-version@^9.3.0" + "standard-version@^9.3.0", + "toml-patch@^0.2.3" ], "lockfileEntries": { "@actions/core@^1.4.0": "https://registry.yarnpkg.com/@actions/core/-/core-1.4.0.tgz#cf2e6ee317e314b03886adfeb20e448d50d6e524", @@ -23,7 +21,6 @@ "@babel/code-frame@^7.0.0": "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.14.5.tgz#23b08d740e83f49c5e59945fbf1b43e80bbf4edb", "@babel/helper-validator-identifier@^7.14.5": "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.5.tgz#d0f0e277c512e0c938277faa85a3968c9a44c0e8", "@babel/highlight@^7.14.5": "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.14.5.tgz#6861a52f03966405001f6aa534a01a24d99e8cd9", - "@iarna/toml@^2.2.5": "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c", "@octokit/auth-token@^2.4.4": "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-2.4.5.tgz#568ccfb8cb46f36441fac094ce34f7a875b197f3", "@octokit/core@^3.4.0": 
"https://registry.yarnpkg.com/@octokit/core/-/core-3.5.1.tgz#8601ceeb1ec0e1b1b8217b960a413ed8e947809b", "@octokit/endpoint@^6.0.1": "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-6.0.12.tgz#3b4d47a4b0e79b1027fb8d75d4221928b2d05658", @@ -264,6 +261,7 @@ "through2@^4.0.0": "https://registry.yarnpkg.com/through2/-/through2-4.0.2.tgz#a7ce3ac2a7a8b0b966c80e7c49f0484c3b239764", "through@2": "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5", "through@>=2.2.7 <3": "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5", + "toml-patch@^0.2.3": "https://registry.yarnpkg.com/toml-patch/-/toml-patch-0.2.3.tgz#5d6e5884755089c06622aa5fee7458d995520e8a", "trim-newlines@^1.0.0": "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613", "trim-newlines@^3.0.0": "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-3.0.1.tgz#260a5d962d8b752425b32f3a7db0dcacd176c144", "trim-off-newlines@^1.0.0": "https://registry.yarnpkg.com/trim-off-newlines/-/trim-off-newlines-1.0.1.tgz#9f9ba9d9efa8764c387698bcbfeb2c848f11adb3", diff --git a/node_modules/@iarna/toml/CHANGELOG.md b/node_modules/@iarna/toml/CHANGELOG.md deleted file mode 100755 index 21964f9..0000000 --- a/node_modules/@iarna/toml/CHANGELOG.md +++ /dev/null @@ -1,278 +0,0 @@ -# 2.2.5 - -* Docs: Updated benchmark results. Add fast-toml to result list. Improved benchmark layout. -* Update @sgarciac/bombadil and @ltd/j-toml in benchmarks and compliance tests. -* Dev: Some dev dep updates that shouldn't have any impact. - -# 2.2.4 - -* Bug fix: Plain date literals (not datetime) immediately followed by another statement (no whitespace or blank line) would crash. Fixes [#19](https://github.com/iarna/iarna-toml/issues/19) and [#23](https://github.com/iarna/iarna-toml/issues/23), thank you [@arnau](https://github.com/arnau) and [@jschaf](https://github.com/jschaf) for reporting this! -* Bug fix: Hex literals with lowercase Es would throw errors. (Thank you [@DaeCatt](https://github.com/DaeCatt) for this fix!) Fixed [#20](https://github.com/iarna/iarna-toml/issues/20) -* Some minor doc tweaks -* Added Node 12 and 13 to Travis. (Node 6 is failing there now, mysteriously. It works on my machine™, shipping anyway. 🙃) - -# 2.2.3 - -This release just updates the spec compliance tests and benchmark data to -better represent @ltd/j-toml. - -# 2.2.2 - -## Fixes - -* Support parsing and stringifying objects with `__proto__` properties. ([@LongTengDao](https://github.com/LongTengDao)) - -## Misc - -* Updates for spec compliance and benchmarking: - * @sgarciac/bombadil -> 2.1.0 - * toml -> 3.0.0 -* Added spec compliance and benchmarking for: - * @ltd/j-toml - -# 2.2.1 - -## Fixes - -* Fix bug where keys with names matching javascript Object methods would - error. Thanks [@LongTengDao](https://github.com/LongTengDao) for finding this! -* Fix bug where a bundled version would fail if `util.inspect` wasn't - provided. This was supposed to be guarded against, but there was a bug in - the guard. Thanks [@agriffis](https://github.com/agriffis) for finding and fixing this! - -## Misc - -* Update the version of bombadil for spec compliance and benchmarking purposes to 2.0.0 - -## Did you know? - -Node 6 and 8 are measurably slower than Node 6, 10 and 11, at least when it comes to parsing TOML! 
- -![](https://pbs.twimg.com/media/DtDeVjmU4AE5apz.jpg) - -# 2.2.0 - -## Features - -* Typescript: Lots of improvements to our type definitions, many many to - [@jorgegonzalez](https://github.com/jorgegonzalez) and [@momocow](https://github.com/momocow) for working through these. - -## Fixes - -* Very large integers (>52bit) are stored as BigInts on runtimes that - support them. BigInts are 128bits, but the TOML spec limits its integers - to 64bits. We now limit our integers to 64bits - as well. -* Fix a bug in stringify where control characters were being emitted as unicode chars and not escape sequences. - -## Misc - -* Moved our spec tests out to an external repo -* Improved the styling of the spec compliance comparison - -# 2.1.1 - -## Fixes - -* Oops, type defs didn't end up in the tarball, ty [@jorgegonzalez](https://github.com/jorgegonzalez)‼ - -# 2.1.0 - -## Features - -* Types for typescript support, thank you [@momocow](https://github.com/momocow)! - -## Fixes - -* stringify: always strip invalid dates. This fixes a bug where an - invalid date in an inline array would not be removed and would instead - result in an error. -* stringify: if an invalid type is found make sure it's thrown as an - error object. Previously the type name was, unhelpfully, being thrown. -* stringify: Multiline strings ending in a quote would generate invalid TOML. -* parse: Error if a signed integer has a leading zero, eg, `-01` or `+01`. -* parse: Error if \_ appears at the end of the integer part of a float, eg `1_.0`. \_ is only valid between _digits_. - -## Fun - -* BurntSushi's comprehensive TOML 0.4.0 test suite is now used in addition to our existing test suite. -* You can see exactly how the other JS TOML libraries stack up in testing - against both BurntSushi's tests and my own in the new - [TOML-SPEC-SUPPORT](TOML-SPEC-SUPPORT.md) doc. - -# 2.0.0 - -With 2.0.0, @iarna/toml supports the TOML v0.5.0 specification. TOML 0.5.0 -brings some changes: - -* Delete characters (U+007F) are not allowed in plain strings. You can include them with - escaped unicode characters, eg `\u007f`. -* Integers are specified as being 64bit unsigned values. These are - supported using `BigInt`s if you are using Node 10 or later. -* Keys may be literal strings, that is, you can use single quoted strings to - quote key names, so the following is now valid: - 'a"b"c' = 123 -* The floating point values `nan`, `inf` and `-inf` are supported. The stringifier will no - longer strip NaN, Infinity and -Infinity, instead serializing them as these new values.. -* Datetimes can separate the date and time with a space instead of a T, so - `2017-12-01T00:00:00Z` can be written as `2017-12-01 00:00:00Z`. -* Datetimes can be floating, that is, they can be represented without a timezone. - These are represented in javascript as Date objects whose `isFloating` property is true and - whose `toISOString` method will return a representation without a timezone. -* Dates without times are now supported. Dates do not have timezones. Dates - are represented in javascript as a Date object whose `isDate` property is true and - whose `toISOString` method returns just the date. -* Times without dates are now supported. Times do not have timezones. Times - are represented in javascript as a Date object whose `isTime` property is true and - whose `toISOString` method returns just the time. -* Keys can now include dots to directly address deeper structures, so `a.b = 23` is - the equivalent of `a = {b = 23}` or ```[a] -b = 23```. 
These can be used both as keys to regular tables and inline tables. -* Integers can now be specified in binary, octal and hexadecimal by prefixing the - number with `0b`, `0o` and `0x` respectively. It is now illegal to left - pad a decimal value with zeros. - -Some parser details were also fixed: - -* Negative zero (`-0.0`) and positive zero (`0.0`) are distinct floating point values. -* Negative integer zero (`-0`) is not distinguished from positive zero (`0`). - -# 1.7.1 - -Another 18% speed boost on our overall benchmarks! This time it came from -switching from string comparisons to integer by converting each character to -its respective code point. This also necessitated rewriting the boolean -parser to actually parse character-by-character as it should. End-of-stream -is now marked with a numeric value outside of the Unicode range, rather than -a Symbol, meaning that the parser's char property is now monomorphic. - -Bug fix, previously, `'abc''def'''` was accepted (as the value: `abcdef`). -Now it will correctly raise an error. - -Spec tests now run against bombadil as well (it fails some, which is unsurprising -given its incomplete state). - -# 1.7.0 - -This release features an overall 15% speed boost on our benchmarks. This -came from a few things: - -* Date parsing was rewritten to not use regexps, resulting in a huge speed increase. -* Strings of all kinds and bare keywords now use tight loops to collect characters when this will help. -* Regexps in general were mostly removed. This didn't result in a speed - change, but it did allow refactoring the parser to be a lot easier to - follow. -* The internal state tracking now uses a class and is constructed with a - fixed set of properties, allowing v8's optimizer to be more effective. - -In the land of new features: - -* Errors in the syntax of your TOML will now have the `fromTOML` property - set to true. This is in addition to the `line`, `col` and `pos` - properties they already have. - - The main use of this is to make it possible to distinguish between errors - in the TOML and errors in the parser code itself. This is of particular utility - when testing parse errors. - -# 1.6.0 - -**FIXES** - -* TOML.stringify: Allow toJSON properties that aren't functions, to align with JSON.stringify's behavior. -* TOML.stringify: Don't use ever render keys as literal strings. -* TOML.stringify: Don't try to escape control characters in literal strings. - -**FEATURES** - -* New Export: TOML.stringify.value, for encoding a stand alone inline value as TOML would. This produces - a TOML fragment, not a complete valid document. - -# 1.5.6 - -* String literals are NOT supported as key names. -* Accessing a shallower table after accessing it more deeply is ok and no longer crashes, eg: - ```toml - [a.b] - [a] - ``` -* Unicode characters in the reserved range now crash. -* Empty bare keys, eg `[.abc]` or `[]` now crash. -* Multiline backslash trimming supports CRs. -* Multiline post quote trimming supports CRs. -* Strings may not contain bare control chars (0x00-0x1f), except for \n, \r and \t. - -# 1.5.5 - -* Yet MORE README fixes. 🙃 - -# 1.5.4 - -* README fix - -# 1.5.3 - -* Benchmarks! -* More tests! -* More complete LICENSE information (some dev files are from other, MIT - licensed, projects, this is now more explicitly documented.) - -# 1.5.2 - -* parse: Arrays with mixed types now throw errors, per the spec. 
-* parse: Fix a parser bug that would result in errors when trying to parse arrays of numbers or dates - that were not separated by a space from the closing ]. -* parse: Fix a bug in the error pretty printer that resulted in errors on - the first line not getting the pretty print treatment. -* stringify: Fix long standing bug where an array of Numbers, some of which required - decimals, would be emitted in a way that parsers would treat as mixed - Integer and Float values. Now if any Numbers in an array must be - represented with a decimal then all will be emitted such that parsers will - understand them to be Float. - -# 1.5.1 - -* README fix - -# 1.5.0 - -* A brand new TOML parser, from scratch, that performs like `toml-j0.4` - without the crashes and with vastly better error messages. -* 100% test coverage for both the new parser and the existing stringifier. Some subtle bugs squashed! - -# v1.4.2 - -* Revert fallback due to its having issues with the same files. (New plan - will be to write my own.) - -# v1.4.1 - -* Depend on both `toml` and `toml-j0.4` with fallback from the latter to the - former when the latter crashes. - -# v1.4.0 - -* Ducktype dates to make them compatible with `moment` and other `Date` replacements. - -# v1.3.1 - -* Update docs with new toml module. - -# v1.3.0 - -* Switch from `toml` to `toml-j0.4`, which is between 20x and 200x faster. - (The larger the input, the faster it is compared to `toml`). - -# v1.2.0 - -* Return null when passed in null as the top level object. -* Detect and skip invalid dates and numbers - -# v1.1.0 - -* toJSON transformations are now honored (for everything except Date objects, as JSON represents them as strings). -* Undefined/null values no longer result in exceptions, they now just result in the associated key being elided. - -# v1.0.1 - -* Initial release diff --git a/node_modules/@iarna/toml/LICENSE b/node_modules/@iarna/toml/LICENSE deleted file mode 100755 index 51bcf57..0000000 --- a/node_modules/@iarna/toml/LICENSE +++ /dev/null @@ -1,14 +0,0 @@ -Copyright (c) 2016, Rebecca Turner - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - diff --git a/node_modules/@iarna/toml/README.md b/node_modules/@iarna/toml/README.md deleted file mode 100755 index 1958324..0000000 --- a/node_modules/@iarna/toml/README.md +++ /dev/null @@ -1,301 +0,0 @@ -# @iarna/toml - -Better TOML parsing and stringifying all in that familiar JSON interface. 
- -[![Coverage Status](https://coveralls.io/repos/github/iarna/iarna-toml/badge.svg)](https://coveralls.io/github/iarna/iarna-toml) - -# ** TOML 0.5.0 ** - -### TOML Spec Support - -The most recent version as of 2018-07-26: [v0.5.0](https://github.com/mojombo/toml/blob/master/versions/en/toml-v0.5.0.md) - -### Example - -```js -const TOML = require('@iarna/toml') -const obj = TOML.parse(`[abc] -foo = 123 -bar = [1,2,3]`) -/* obj = -{abc: {foo: 123, bar: [1,2,3]}} -*/ -const str = TOML.stringify(obj) -/* str = -[abc] -foo = 123 -bar = [ 1, 2, 3 ] -*/ -``` - -Visit the project github [for more examples](https://github.com/iarna/iarna-toml/tree/latest/examples)! - - -## Why @iarna/toml - -* See [TOML-SPEC-SUPPORT](https://shared.by.re-becca.org/misc/TOML-SPEC-SUPPORT.html) - for a comparison of which TOML features are supported by the various - Node.js TOML parsers. -* BigInt support on Node 10! -* 100% test coverage. -* Fast parsing. It's as much as 100 times - faster than `toml` and 3 times faster than `toml-j0.4`. However a recent - newcomer [`@ltd/j-toml`](https://www.npmjs.com/package/@ltd/j-toml) has - appeared with 0.5 support and astoundingly fast parsing speeds for large - text blocks. All I can say is you'll have to test your specific work loads - if you want to know which of @iarna/toml and @ltd/j-toml is faster for - you, as we currently excell in different areas. -* Careful adherence to spec. Tests go beyond simple coverage. -* Smallest parser bundle (if you use `@iarna/toml/parse-string`). -* No deps. -* Detailed and easy to read error messages‼ - -```console -> TOML.parse(src) -Error: Unexpected character, expecting string, number, datetime, boolean, inline array or inline table at row 6, col 5, pos 87: -5: "abc\"" = { abc=123,def="abc" } -6> foo=sdkfj - ^ -7: -``` - -## TOML.parse(str) → Object [(example)](https://github.com/iarna/iarna-toml/blob/latest/examples/parse.js) - -Also available with: `require('@iarna/toml/parse-string')` - -Synchronously parse a TOML string and return an object. - - -## TOML.stringify(obj) → String [(example)](https://github.com/iarna/iarna-toml/blob/latest/examples/stringify.js) - -Also available with: `require('@iarna/toml/stringify)` - -Serialize an object as TOML. - -## [your-object].toJSON - -If an object `TOML.stringify` is serializing has a `toJSON` method then it -will call it to transform the object before serializing it. This matches -the behavior of `JSON.stringify`. - -The one exception to this is that `toJSON` is not called for `Date` objects -because `JSON` represents dates as strings and TOML can represent them natively. - -[`moment`](https://www.npmjs.com/package/moment) objects are treated the -same as native `Date` objects, in this respect. - -## TOML.stringify.value(obj) -> String - -Also available with: `require('@iarna/toml/stringify').value` - -Serialize a value as TOML would. This is a fragment and not a complete -valid TOML document. - -## Promises and Streaming - -The parser provides alternative async and streaming interfaces, for times -that you're working with really absurdly big TOML files and don't want to -tie-up the event loop while it parses. - -### TOML.parse.async(str[, opts]) → Promise(Object) [(example)](https://github.com/iarna/iarna-toml/blob/latest/examples/parse-async.js) - -Also available with: `require('@iarna/toml/parse-async')` - -`opts.blocksize` is the amount text to parser per pass through the event loop. Defaults to 40kb. 
- -Asynchronously parse a TOML string and return a promise of the resulting object. - -### TOML.parse.stream(readable) → Promise(Object) [(example)](https://github.com/iarna/iarna-toml/blob/latest/examples/parse-stream-readable.js) - -Also available with: `require('@iarna/toml/parse-stream')` - -Given a readable stream, parse it as it feeds us data. Return a promise of the resulting object. - -### readable.pipe(TOML.parse.stream()) → Transform [(example)](https://github.com/iarna/iarna-toml/blob/latest/examples/parse-stream-through.js) - -Also available with: `require('@iarna/toml/parse-stream')` - -Returns a transform stream in object mode. When it completes, emit the -resulting object. Only one object will ever be emitted. - -## Lowlevel Interface [(example)](https://github.com/iarna/iarna-toml/blob/latest/examples/parse-lowlevel.js) [(example w/ parser debugging)](https://github.com/iarna/iarna-toml/blob/latest/examples/parse-lowlevel-debug.js) - -You construct a parser object, per TOML file you want to process: - -```js -const TOMLParser = require('@iarna/toml/lib/toml-parser.js') -const parser = new TOMLParser() -``` - -Then you call the `parse` method for each chunk as you read them, or in a -single call: - -```js -parser.parse(`hello = 'world'`) -``` - -And finally, you call the `finish` method to complete parsing and retrieve -the resulting object. - -```js -const data = parser.finish() -``` - -Both the `parse` method and `finish` method will throw if they find a -problem with the string they were given. Error objects thrown from the -parser have `pos`, `line` and `col` attributes. `TOML.parse` adds a visual -summary of where in the source string there were issues using -`parse-pretty-error` and you can too: - -```js -const prettyError = require('./parse-pretty-error.js') -const newErr = prettyError(err, sourceString) -``` - -## What's Different - -Version 2 of this module supports TOML 0.5.0. Other modules currently -published to the npm registry support 0.4.0. 0.5.0 is mostly backwards -compatible with 0.4.0, but if you have need, you can install @iarna/toml@1 -to get a version of this module that supports 0.4.0. Please see the -[CHANGELOG](CHANGELOG.md#2.0.0) for details on exactly whats changed. - -## TOML we can't do - -* `-nan` is a valid TOML value and is converted into `NaN`. There is no way to - produce `-nan` when stringifying. Stringification will produce positive `nan`. -* Detecting and erroring on invalid utf8 documents: This is because Node's - UTF8 processing converts invalid sequences into the placeholder character - and does not have facilities for reporting these as errors instead. We - _can_ detect the placeholder character, but it's valid to intentionally - include them in documents, so erroring on them is not great. -* On versions of Node < 10, very large Integer values will lose precision. - On Node >=10, bigints are used. -* Floating/local dates and times are still represented by JavaScript Date - objects, which don't actually support these concepts. The objects - returned have been modified so that you can determine what kind of thing - they are (with `isFloating`, `isDate`, `isTime` properties) and that - their ISO representation (via `toISOString`) is representative of their - TOML value. They will correctly round trip if you pass them to - `TOML.stringify`. -* Binary, hexadecimal and octal values are converted to ordinary integers and - will be decimal if you stringify them. 
- -## Changes - -I write a by hand, honest-to-god, -[CHANGELOG](https://github.com/iarna/iarna-toml/blob/latest/CHANGELOG.md) -for this project. It's a description of what went into a release that you -the consumer of the module could care about, not a list of git commits, so -please check it out! - -## Benchmarks - -You can run them yourself with: - -```console -$ npm run benchmark -``` - -The results below are from my desktop using Node 13.13.0. The library -versions tested were `@iarna/toml@2.2.4`, `toml-j0.4@1.1.1`, `toml@3.0.0`, -`@sgarciac/bombadil@2.3.0`, `@ltd/j-toml@0.5.107`, and `fast-toml@0.5.4`. The speed value is -megabytes-per-second that the parser can process of that document type. -Bigger is better. The percentage after average results is the margin of error. - -New here is fast-toml. fast-toml is very fast, for some datatypes, but it -also is missing most error checking demanded by the spec. For 0.4, it is -complete except for detail of multiline strings caught by the compliance -tests. Its support for 0.5 is incomplete. Check out the -[spec compliance](https://shared.by.re-becca.org/misc/TOML-SPEC-SUPPORT.html) doc -for details. - -As this table is getting a little wide, with how npm and github display it, -you can also view it seperately in the -[BENCHMARK](https://shared.by.re-becca.org/misc/BENCHMARK.html) document. -
-| | @iarna/toml | toml-j0.4 | toml | @sgarciac/bombadil | @ltd/j-toml | fast-toml |
-| - | :---------: | :-------: | :--: | :----------------: | :---------: | :-------: |
-| **Overall** | 28MB/sec<br>0.35% | 6.5MB/sec<br>0.25% | 0.2MB/sec<br>0.70% | - | 35MB/sec<br>0.23% | - |
-| **Spec Example: v0.4.0** | 26MB/sec<br>0.37% | 10MB/sec<br>0.27% | 1MB/sec<br>0.42% | 1.2MB/sec<br>0.95% | 28MB/sec<br>0.31% | - |
-| **Spec Example: Hard Unicode** | 64MB/sec<br>0.59% | 18MB/sec<br>0.12% | 2MB/sec<br>0.20% | 0.6MB/sec<br>0.53% | 68MB/sec<br>0.31% | 78MB/sec<br>0.28% |
-| **Types: Array, Inline** | 7.3MB/sec<br>0.60% | 4MB/sec<br>0.16% | 0.1MB/sec<br>0.91% | 1.3MB/sec<br>0.81% | 10MB/sec<br>0.35% | 9MB/sec<br>0.16% |
-| **Types: Array** | 6.8MB/sec<br>0.19% | 6.7MB/sec<br>0.15% | 0.2MB/sec<br>0.79% | 1.2MB/sec<br>0.93% | 8.8MB/sec<br>0.47% | 27MB/sec<br>0.21% |
-| **Types: Boolean,** | 21MB/sec<br>0.20% | 9.4MB/sec<br>0.17% | 0.2MB/sec<br>0.96% | 1.8MB/sec<br>0.70% | 16MB/sec<br>0.20% | 8.4MB/sec<br>0.22% |
-| **Types: Datetime** | 18MB/sec<br>0.14% | 11MB/sec<br>0.15% | 0.3MB/sec<br>0.85% | 1.6MB/sec<br>0.45% | 9.8MB/sec<br>0.48% | 6.5MB/sec<br>0.23% |
-| **Types: Float** | 8.8MB/sec<br>0.09% | 5.9MB/sec<br>0.14% | 0.2MB/sec<br>0.51% | 2.1MB/sec<br>0.82% | 14MB/sec<br>0.15% | 7.9MB/sec<br>0.14% |
-| **Types: Int** | 5.9MB/sec<br>0.11% | 4.5MB/sec<br>0.28% | 0.1MB/sec<br>0.78% | 1.5MB/sec<br>0.64% | 10MB/sec<br>0.14% | 8MB/sec<br>0.17% |
-| **Types: Literal String, 7 char** | 26MB/sec<br>0.29% | 8.5MB/sec<br>0.32% | 0.3MB/sec<br>0.84% | 2.3MB/sec<br>1.02% | 23MB/sec<br>0.15% | 13MB/sec<br>0.15% |
-| **Types: Literal String, 92 char** | 46MB/sec<br>0.19% | 11MB/sec<br>0.20% | 0.3MB/sec<br>0.56% | 12MB/sec<br>0.92% | 101MB/sec<br>0.17% | 75MB/sec<br>0.29% |
-| **Types: Literal String, Multiline, 1079 char** | 22MB/sec<br>0.42% | 6.7MB/sec<br>0.55% | 0.9MB/sec<br>0.78% | 44MB/sec<br>1.00% | 350MB/sec<br>0.16% | 636MB/sec<br>0.16% |
-| **Types: Basic String, 7 char** | 25MB/sec<br>0.15% | 7.3MB/sec<br>0.18% | 0.2MB/sec<br>0.96% | 2.2MB/sec<br>1.09% | 14MB/sec<br>0.16% | 12MB/sec<br>0.22% |
-| **Types: Basic String, 92 char** | 43MB/sec<br>0.30% | 7.2MB/sec<br>0.16% | 0.1MB/sec<br>4.04% | 12MB/sec<br>1.33% | 71MB/sec<br>0.19% | 70MB/sec<br>0.23% |
-| **Types: Basic String, 1079 char** | 24MB/sec<br>0.45% | 5.8MB/sec<br>0.17% | 0.1MB/sec<br>3.64% | 44MB/sec<br>1.05% | 93MB/sec<br>0.29% | 635MB/sec<br>0.28% |
-| **Types: Table, Inline** | 9.7MB/sec<br>0.10% | 5.5MB/sec<br>0.22% | 0.1MB/sec<br>0.87% | 1.4MB/sec<br>1.18% | 8.7MB/sec<br>0.60% | 8.7MB/sec<br>0.22% |
-| **Types: Table** | 7.1MB/sec<br>0.14% | 5.6MB/sec<br>0.42% | 0.1MB/sec<br>0.65% | 1.4MB/sec<br>1.11% | 7.4MB/sec<br>0.70% | 18MB/sec<br>0.20% |
-| **Scaling: Array, Inline, 1000 elements** | 40MB/sec<br>0.21% | 2.4MB/sec<br>0.19% | 0.1MB/sec<br>0.35% | 1.6MB/sec<br>1.02% | 17MB/sec<br>0.15% | 32MB/sec<br>0.16% |
-| **Scaling: Array, Nested, 1000 deep** | 2MB/sec<br>0.15% | 1.7MB/sec<br>0.26% | 0.3MB/sec<br>0.58% | - | 1.8MB/sec<br>0.74% | 13MB/sec<br>0.20% |
-| **Scaling: Literal String, 40kb** | 61MB/sec<br>0.18% | 10MB/sec<br>0.15% | 3MB/sec<br>0.84% | 12MB/sec<br>0.51% | 551MB/sec<br>0.44% | 19kMB/sec<br>0.19% |
-| **Scaling: Literal String, Multiline, 40kb** | 62MB/sec<br>0.16% | 5MB/sec<br>0.45% | 0.2MB/sec<br>1.70% | 11MB/sec<br>0.74% | 291MB/sec<br>0.24% | 21kMB/sec<br>0.22% |
-| **Scaling: Basic String, Multiline, 40kb** | 62MB/sec<br>0.18% | 5.8MB/sec<br>0.38% | 2.9MB/sec<br>0.86% | 11MB/sec<br>0.41% | 949MB/sec<br>0.44% | 26kMB/sec<br>0.16% |
-| **Scaling: Basic String, 40kb** | 59MB/sec<br>0.20% | 6.3MB/sec<br>0.17% | 0.2MB/sec<br>1.95% | 12MB/sec<br>0.44% | 508MB/sec<br>0.35% | 18kMB/sec<br>0.15% |
-| **Scaling: Table, Inline, 1000 elements** | 28MB/sec<br>0.12% | 8.2MB/sec<br>0.19% | 0.3MB/sec<br>0.89% | 2.3MB/sec<br>1.14% | 5.3MB/sec<br>0.24% | 13MB/sec<br>0.20% |
-| **Scaling: Table, Inline, Nested, 1000 deep** | 7.8MB/sec<br>0.28% | 5MB/sec<br>0.20% | 0.1MB/sec<br>0.84% | - | 3.2MB/sec<br>0.52% | 10MB/sec<br>
0.23% | - -## Tests - -The test suite is maintained at 100% coverage: [![Coverage Status](https://coveralls.io/repos/github/iarna/iarna-toml/badge.svg)](https://coveralls.io/github/iarna/iarna-toml) - -The spec was carefully hand converted into a series of test framework -independent (and mostly language independent) assertions, as pairs of TOML -and YAML files. You can find those files here: -[spec-test](https://github.com/iarna/iarna-toml/blob/latest/test/spec-test/). -A number of examples of invalid Unicode were also written, but are difficult -to make use of in Node.js where Unicode errors are silently hidden. You can -find those here: [spec-test-disabled](https://github.com/iarna/iarna-toml/blob/latest/test/spec-test-disabled/). - -Further tests were written to increase coverage to 100%, these may be more -implementation specific, but they can be found in [coverage](https://github.com/iarna/iarna-toml/blob/latest/test/coverage.js) and -[coverage-error](https://github.com/iarna/iarna-toml/blob/latest/test/coverage-error.js). - -I've also written some quality assurance style tests, which don't contribute -to coverage but do cover scenarios that could easily be problematic for some -implementations can be found in: -[test/qa.js](https://github.com/iarna/iarna-toml/blob/latest/test/qa.js) and -[test/qa-error.js](https://github.com/iarna/iarna-toml/blob/latest/test/qa-error.js). - -All of the official example files from the TOML spec are run through this -parser and compared to the official YAML files when available. These files are from the TOML spec as of: -[357a4ba6](https://github.com/toml-lang/toml/tree/357a4ba6782e48ff26e646780bab11c90ed0a7bc) -and specifically are: - -* [github.com/toml-lang/toml/tree/357a4ba6/examples](https://github.com/toml-lang/toml/tree/357a4ba6782e48ff26e646780bab11c90ed0a7bc/examples) -* [github.com/toml-lang/toml/tree/357a4ba6/tests](https://github.com/toml-lang/toml/tree/357a4ba6782e48ff26e646780bab11c90ed0a7bc/tests) - -The stringifier is tested by round-tripping these same files, asserting that -`TOML.parse(sourcefile)` deepEqual -`TOML.parse(TOML.stringify(TOML.parse(sourcefile))`. This is done in -[test/roundtrip-examples.js](https://github.com/iarna/iarna-toml/blob/latest/test/round-tripping.js) -There are also some tests written to complete coverage from stringification in: -[test/stringify.js](https://github.com/iarna/iarna-toml/blob/latest/test/stringify.js) - -Tests for the async and streaming interfaces are in [test/async.js](https://github.com/iarna/iarna-toml/blob/latest/test/async.js) and [test/stream.js](https://github.com/iarna/iarna-toml/blob/latest/test/stream.js) respectively. - -Tests for the parsers debugging mode live in [test/devel.js](https://github.com/iarna/iarna-toml/blob/latest/test/devel.js). - -And finally, many more stringification tests were borrowed from [@othiym23](https://github.com/othiym23)'s -[toml-stream](https://npmjs.com/package/toml-stream) module. They were fetched as of -[b6f1e26b572d49742d49fa6a6d11524d003441fa](https://github.com/othiym23/toml-stream/tree/b6f1e26b572d49742d49fa6a6d11524d003441fa/test) and live in -[test/toml-stream](https://github.com/iarna/iarna-toml/blob/latest/test/toml-stream/). - -## Improvements to make - -* In stringify: - * Any way to produce comments. As a JSON stand-in I'm not too worried - about this. That said, a document orientated fork is something I'd like - to look at eventually… - * Stringification could use some work on its error reporting. 
It reports - _what's_ wrong, but not where in your data structure it was. -* Further optimize the parser: - * There are some debugging assertions left in the main parser, these should be moved to a subclass. - * Make the whole debugging parser thing work as a mixin instead of as a superclass. diff --git a/node_modules/@iarna/toml/index.d.ts b/node_modules/@iarna/toml/index.d.ts deleted file mode 100755 index d37e2b6..0000000 --- a/node_modules/@iarna/toml/index.d.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { Transform } from "stream"; - -type JsonArray = boolean[] | number[] | string[] | JsonMap[] | Date[] -type AnyJson = boolean | number | string | JsonMap | Date | JsonArray | JsonArray[] - -interface JsonMap { - [key: string]: AnyJson; -} - -interface ParseOptions { - /** - * The amount text to parser per pass through the event loop. Defaults to 40kb (`40000`). - */ - blocksize: number -} - -interface FuncParse { - /** - * Synchronously parse a TOML string and return an object. - */ - (toml: string): JsonMap - - /** - * Asynchronously parse a TOML string and return a promise of the resulting object. - */ - async (toml: string, options?: ParseOptions): Promise - - /** - * Given a readable stream, parse it as it feeds us data. Return a promise of the resulting object. - */ - stream (readable: NodeJS.ReadableStream): Promise - stream (): Transform -} - -interface FuncStringify { - /** - * Serialize an object as TOML. - * - * If an object `TOML.stringify` is serializing has a `toJSON` method - * then it will call it to transform the object before serializing it. - * This matches the behavior of JSON.stringify. - * - * The one exception to this is that `toJSON` is not called for `Date` objects - * because JSON represents dates as strings and TOML can represent them natively. - * - * `moment` objects are treated the same as native `Date` objects, in this respect. - */ - (obj: JsonMap): string - - /** - * Serialize a value as TOML would. This is a fragment and not a complete valid TOML document. 
- */ - value (any: AnyJson): string -} - -export const parse: FuncParse -export const stringify: FuncStringify diff --git a/node_modules/@iarna/toml/lib/create-date.js b/node_modules/@iarna/toml/lib/create-date.js deleted file mode 100755 index 469fc65..0000000 --- a/node_modules/@iarna/toml/lib/create-date.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict' -const f = require('./format-num.js') -const DateTime = global.Date - -class Date extends DateTime { - constructor (value) { - super(value) - this.isDate = true - } - toISOString () { - return `${this.getUTCFullYear()}-${f(2, this.getUTCMonth() + 1)}-${f(2, this.getUTCDate())}` - } -} - -module.exports = value => { - const date = new Date(value) - /* istanbul ignore if */ - if (isNaN(date)) { - throw new TypeError('Invalid Datetime') - } else { - return date - } -} diff --git a/node_modules/@iarna/toml/lib/create-datetime-float.js b/node_modules/@iarna/toml/lib/create-datetime-float.js deleted file mode 100755 index 6fb30ac..0000000 --- a/node_modules/@iarna/toml/lib/create-datetime-float.js +++ /dev/null @@ -1,24 +0,0 @@ -'use strict' -const f = require('./format-num.js') - -class FloatingDateTime extends Date { - constructor (value) { - super(value + 'Z') - this.isFloating = true - } - toISOString () { - const date = `${this.getUTCFullYear()}-${f(2, this.getUTCMonth() + 1)}-${f(2, this.getUTCDate())}` - const time = `${f(2, this.getUTCHours())}:${f(2, this.getUTCMinutes())}:${f(2, this.getUTCSeconds())}.${f(3, this.getUTCMilliseconds())}` - return `${date}T${time}` - } -} - -module.exports = value => { - const date = new FloatingDateTime(value) - /* istanbul ignore if */ - if (isNaN(date)) { - throw new TypeError('Invalid Datetime') - } else { - return date - } -} diff --git a/node_modules/@iarna/toml/lib/create-datetime.js b/node_modules/@iarna/toml/lib/create-datetime.js deleted file mode 100755 index 47deded..0000000 --- a/node_modules/@iarna/toml/lib/create-datetime.js +++ /dev/null @@ -1,10 +0,0 @@ -'use strict' -module.exports = value => { - const date = new Date(value) - /* istanbul ignore if */ - if (isNaN(date)) { - throw new TypeError('Invalid Datetime') - } else { - return date - } -} diff --git a/node_modules/@iarna/toml/lib/create-time.js b/node_modules/@iarna/toml/lib/create-time.js deleted file mode 100755 index 190c767..0000000 --- a/node_modules/@iarna/toml/lib/create-time.js +++ /dev/null @@ -1,22 +0,0 @@ -'use strict' -const f = require('./format-num.js') - -class Time extends Date { - constructor (value) { - super(`0000-01-01T${value}Z`) - this.isTime = true - } - toISOString () { - return `${f(2, this.getUTCHours())}:${f(2, this.getUTCMinutes())}:${f(2, this.getUTCSeconds())}.${f(3, this.getUTCMilliseconds())}` - } -} - -module.exports = value => { - const date = new Time(value) - /* istanbul ignore if */ - if (isNaN(date)) { - throw new TypeError('Invalid Datetime') - } else { - return date - } -} diff --git a/node_modules/@iarna/toml/lib/format-num.js b/node_modules/@iarna/toml/lib/format-num.js deleted file mode 100755 index 6addf08..0000000 --- a/node_modules/@iarna/toml/lib/format-num.js +++ /dev/null @@ -1,6 +0,0 @@ -'use strict' -module.exports = (d, num) => { - num = String(num) - while (num.length < d) num = '0' + num - return num -} diff --git a/node_modules/@iarna/toml/lib/parser-debug.js b/node_modules/@iarna/toml/lib/parser-debug.js deleted file mode 100755 index e222f27..0000000 --- a/node_modules/@iarna/toml/lib/parser-debug.js +++ /dev/null @@ -1,60 +0,0 @@ -'use strict' -const Parser = 
require('./parser.js') -const util = require('util') - -const dump = _ => util.inspect(_, {colors: true, depth: 10, breakLength: Infinity}) -class DebugParser extends Parser { - stateName (state) { - // istanbul ignore next - return (state.parser && state.parser.name) || state.name || ('anonymous') - } - runOne () { - const callStack = this.stack.concat(this.state).map(_ => this.stateName(_)).join(' <- ') - console.log('RUN', callStack, dump({line: this.line, col: this.col, char: this.char, ret: this.state.returned})) - return super.runOne() - } - finish () { - const obj = super.finish() - // istanbul ignore if - if (this.stack.length !== 0) { - throw new Parser.Error('All states did not return by end of stream') - } - return obj - } - callStack () { - const callStack = this.stack.map(_ => this.stateName(_)).join(' ').replace(/\S/g, ' ') - return callStack ? callStack + ' ' : '' - } - next (fn) { - console.log(' ', this.callStack(), 'NEXT', this.stateName(fn)) - return super.next(fn) - } - goto (fn) { - console.log(' ', this.callStack(), 'GOTO', this.stateName(fn)) - super.next(fn) - return false - } - call (fn, returnWith) { - console.log(' ', this.callStack(), 'CALL', fn.name, returnWith ? '-> ' + returnWith.name : '') - if (returnWith) super.next(returnWith) - this.stack.push(this.state) - this.state = {parser: fn, buf: '', returned: null} - } - callNow (fn, returnWith) { - console.log(' ', this.callStack(), 'CALLNOW', fn.name, returnWith ? '-> ' + returnWith.name : '') - if (returnWith) super.next(returnWith) - this.stack.push(this.state) - this.state = {parser: fn, buf: '', returned: null} - return false - } - return (value) { - console.log(' ', this.callStack(), 'RETURN') - return super.return(value) - } - returnNow (value) { - console.log(' ', this.callStack(), 'RETURNNOW') - super.return(value) - return false - } -} -module.exports = DebugParser diff --git a/node_modules/@iarna/toml/lib/parser.js b/node_modules/@iarna/toml/lib/parser.js deleted file mode 100755 index 398856b..0000000 --- a/node_modules/@iarna/toml/lib/parser.js +++ /dev/null @@ -1,127 +0,0 @@ -'use strict' -const ParserEND = 0x110000 -class ParserError extends Error { - /* istanbul ignore next */ - constructor (msg, filename, linenumber) { - super('[ParserError] ' + msg, filename, linenumber) - this.name = 'ParserError' - this.code = 'ParserError' - if (Error.captureStackTrace) Error.captureStackTrace(this, ParserError) - } -} -class State { - constructor (parser) { - this.parser = parser - this.buf = '' - this.returned = null - this.result = null - this.resultTable = null - this.resultArr = null - } -} -class Parser { - constructor () { - this.pos = 0 - this.col = 0 - this.line = 0 - this.obj = {} - this.ctx = this.obj - this.stack = [] - this._buf = '' - this.char = null - this.ii = 0 - this.state = new State(this.parseStart) - } - - parse (str) { - /* istanbul ignore next */ - if (str.length === 0 || str.length == null) return - - this._buf = String(str) - this.ii = -1 - this.char = -1 - let getNext - while (getNext === false || this.nextChar()) { - getNext = this.runOne() - } - this._buf = null - } - nextChar () { - if (this.char === 0x0A) { - ++this.line - this.col = -1 - } - ++this.ii - this.char = this._buf.codePointAt(this.ii) - ++this.pos - ++this.col - return this.haveBuffer() - } - haveBuffer () { - return this.ii < this._buf.length - } - runOne () { - return this.state.parser.call(this, this.state.returned) - } - finish () { - this.char = ParserEND - let last - do { - last = this.state.parser - 
this.runOne() - } while (this.state.parser !== last) - - this.ctx = null - this.state = null - this._buf = null - - return this.obj - } - next (fn) { - /* istanbul ignore next */ - if (typeof fn !== 'function') throw new ParserError('Tried to set state to non-existent state: ' + JSON.stringify(fn)) - this.state.parser = fn - } - goto (fn) { - this.next(fn) - return this.runOne() - } - call (fn, returnWith) { - if (returnWith) this.next(returnWith) - this.stack.push(this.state) - this.state = new State(fn) - } - callNow (fn, returnWith) { - this.call(fn, returnWith) - return this.runOne() - } - return (value) { - /* istanbul ignore next */ - if (this.stack.length === 0) throw this.error(new ParserError('Stack underflow')) - if (value === undefined) value = this.state.buf - this.state = this.stack.pop() - this.state.returned = value - } - returnNow (value) { - this.return(value) - return this.runOne() - } - consume () { - /* istanbul ignore next */ - if (this.char === ParserEND) throw this.error(new ParserError('Unexpected end-of-buffer')) - this.state.buf += this._buf[this.ii] - } - error (err) { - err.line = this.line - err.col = this.col - err.pos = this.pos - return err - } - /* istanbul ignore next */ - parseStart () { - throw new ParserError('Must declare a parseStart method') - } -} -Parser.END = ParserEND -Parser.Error = ParserError -module.exports = Parser diff --git a/node_modules/@iarna/toml/lib/toml-parser.js b/node_modules/@iarna/toml/lib/toml-parser.js deleted file mode 100755 index ebcaec8..0000000 --- a/node_modules/@iarna/toml/lib/toml-parser.js +++ /dev/null @@ -1,1379 +0,0 @@ -'use strict' -/* eslint-disable no-new-wrappers, no-eval, camelcase, operator-linebreak */ -module.exports = makeParserClass(require('./parser.js')) -module.exports.makeParserClass = makeParserClass - -class TomlError extends Error { - constructor (msg) { - super(msg) - this.name = 'TomlError' - /* istanbul ignore next */ - if (Error.captureStackTrace) Error.captureStackTrace(this, TomlError) - this.fromTOML = true - this.wrapped = null - } -} -TomlError.wrap = err => { - const terr = new TomlError(err.message) - terr.code = err.code - terr.wrapped = err - return terr -} -module.exports.TomlError = TomlError - -const createDateTime = require('./create-datetime.js') -const createDateTimeFloat = require('./create-datetime-float.js') -const createDate = require('./create-date.js') -const createTime = require('./create-time.js') - -const CTRL_I = 0x09 -const CTRL_J = 0x0A -const CTRL_M = 0x0D -const CTRL_CHAR_BOUNDARY = 0x1F // the last non-character in the latin1 region of unicode, except DEL -const CHAR_SP = 0x20 -const CHAR_QUOT = 0x22 -const CHAR_NUM = 0x23 -const CHAR_APOS = 0x27 -const CHAR_PLUS = 0x2B -const CHAR_COMMA = 0x2C -const CHAR_HYPHEN = 0x2D -const CHAR_PERIOD = 0x2E -const CHAR_0 = 0x30 -const CHAR_1 = 0x31 -const CHAR_7 = 0x37 -const CHAR_9 = 0x39 -const CHAR_COLON = 0x3A -const CHAR_EQUALS = 0x3D -const CHAR_A = 0x41 -const CHAR_E = 0x45 -const CHAR_F = 0x46 -const CHAR_T = 0x54 -const CHAR_U = 0x55 -const CHAR_Z = 0x5A -const CHAR_LOWBAR = 0x5F -const CHAR_a = 0x61 -const CHAR_b = 0x62 -const CHAR_e = 0x65 -const CHAR_f = 0x66 -const CHAR_i = 0x69 -const CHAR_l = 0x6C -const CHAR_n = 0x6E -const CHAR_o = 0x6F -const CHAR_r = 0x72 -const CHAR_s = 0x73 -const CHAR_t = 0x74 -const CHAR_u = 0x75 -const CHAR_x = 0x78 -const CHAR_z = 0x7A -const CHAR_LCUB = 0x7B -const CHAR_RCUB = 0x7D -const CHAR_LSQB = 0x5B -const CHAR_BSOL = 0x5C -const CHAR_RSQB = 0x5D -const CHAR_DEL = 0x7F -const 
SURROGATE_FIRST = 0xD800 -const SURROGATE_LAST = 0xDFFF - -const escapes = { - [CHAR_b]: '\u0008', - [CHAR_t]: '\u0009', - [CHAR_n]: '\u000A', - [CHAR_f]: '\u000C', - [CHAR_r]: '\u000D', - [CHAR_QUOT]: '\u0022', - [CHAR_BSOL]: '\u005C' -} - -function isDigit (cp) { - return cp >= CHAR_0 && cp <= CHAR_9 -} -function isHexit (cp) { - return (cp >= CHAR_A && cp <= CHAR_F) || (cp >= CHAR_a && cp <= CHAR_f) || (cp >= CHAR_0 && cp <= CHAR_9) -} -function isBit (cp) { - return cp === CHAR_1 || cp === CHAR_0 -} -function isOctit (cp) { - return (cp >= CHAR_0 && cp <= CHAR_7) -} -function isAlphaNumQuoteHyphen (cp) { - return (cp >= CHAR_A && cp <= CHAR_Z) - || (cp >= CHAR_a && cp <= CHAR_z) - || (cp >= CHAR_0 && cp <= CHAR_9) - || cp === CHAR_APOS - || cp === CHAR_QUOT - || cp === CHAR_LOWBAR - || cp === CHAR_HYPHEN -} -function isAlphaNumHyphen (cp) { - return (cp >= CHAR_A && cp <= CHAR_Z) - || (cp >= CHAR_a && cp <= CHAR_z) - || (cp >= CHAR_0 && cp <= CHAR_9) - || cp === CHAR_LOWBAR - || cp === CHAR_HYPHEN -} -const _type = Symbol('type') -const _declared = Symbol('declared') - -const hasOwnProperty = Object.prototype.hasOwnProperty -const defineProperty = Object.defineProperty -const descriptor = {configurable: true, enumerable: true, writable: true, value: undefined} - -function hasKey (obj, key) { - if (hasOwnProperty.call(obj, key)) return true - if (key === '__proto__') defineProperty(obj, '__proto__', descriptor) - return false -} - -const INLINE_TABLE = Symbol('inline-table') -function InlineTable () { - return Object.defineProperties({}, { - [_type]: {value: INLINE_TABLE} - }) -} -function isInlineTable (obj) { - if (obj === null || typeof (obj) !== 'object') return false - return obj[_type] === INLINE_TABLE -} - -const TABLE = Symbol('table') -function Table () { - return Object.defineProperties({}, { - [_type]: {value: TABLE}, - [_declared]: {value: false, writable: true} - }) -} -function isTable (obj) { - if (obj === null || typeof (obj) !== 'object') return false - return obj[_type] === TABLE -} - -const _contentType = Symbol('content-type') -const INLINE_LIST = Symbol('inline-list') -function InlineList (type) { - return Object.defineProperties([], { - [_type]: {value: INLINE_LIST}, - [_contentType]: {value: type} - }) -} -function isInlineList (obj) { - if (obj === null || typeof (obj) !== 'object') return false - return obj[_type] === INLINE_LIST -} - -const LIST = Symbol('list') -function List () { - return Object.defineProperties([], { - [_type]: {value: LIST} - }) -} -function isList (obj) { - if (obj === null || typeof (obj) !== 'object') return false - return obj[_type] === LIST -} - -// in an eval, to let bundlers not slurp in a util proxy -let _custom -try { - const utilInspect = eval("require('util').inspect") - _custom = utilInspect.custom -} catch (_) { - /* eval require not available in transpiled bundle */ -} -/* istanbul ignore next */ -const _inspect = _custom || 'inspect' - -class BoxedBigInt { - constructor (value) { - try { - this.value = global.BigInt.asIntN(64, value) - } catch (_) { - /* istanbul ignore next */ - this.value = null - } - Object.defineProperty(this, _type, {value: INTEGER}) - } - isNaN () { - return this.value === null - } - /* istanbul ignore next */ - toString () { - return String(this.value) - } - /* istanbul ignore next */ - [_inspect] () { - return `[BigInt: ${this.toString()}]}` - } - valueOf () { - return this.value - } -} - -const INTEGER = Symbol('integer') -function Integer (value) { - let num = Number(value) - // -0 is a float thing, 
not an int thing - if (Object.is(num, -0)) num = 0 - /* istanbul ignore else */ - if (global.BigInt && !Number.isSafeInteger(num)) { - return new BoxedBigInt(value) - } else { - /* istanbul ignore next */ - return Object.defineProperties(new Number(num), { - isNaN: {value: function () { return isNaN(this) }}, - [_type]: {value: INTEGER}, - [_inspect]: {value: () => `[Integer: ${value}]`} - }) - } -} -function isInteger (obj) { - if (obj === null || typeof (obj) !== 'object') return false - return obj[_type] === INTEGER -} - -const FLOAT = Symbol('float') -function Float (value) { - /* istanbul ignore next */ - return Object.defineProperties(new Number(value), { - [_type]: {value: FLOAT}, - [_inspect]: {value: () => `[Float: ${value}]`} - }) -} -function isFloat (obj) { - if (obj === null || typeof (obj) !== 'object') return false - return obj[_type] === FLOAT -} - -function tomlType (value) { - const type = typeof value - if (type === 'object') { - /* istanbul ignore if */ - if (value === null) return 'null' - if (value instanceof Date) return 'datetime' - /* istanbul ignore else */ - if (_type in value) { - switch (value[_type]) { - case INLINE_TABLE: return 'inline-table' - case INLINE_LIST: return 'inline-list' - /* istanbul ignore next */ - case TABLE: return 'table' - /* istanbul ignore next */ - case LIST: return 'list' - case FLOAT: return 'float' - case INTEGER: return 'integer' - } - } - } - return type -} - -function makeParserClass (Parser) { - class TOMLParser extends Parser { - constructor () { - super() - this.ctx = this.obj = Table() - } - - /* MATCH HELPER */ - atEndOfWord () { - return this.char === CHAR_NUM || this.char === CTRL_I || this.char === CHAR_SP || this.atEndOfLine() - } - atEndOfLine () { - return this.char === Parser.END || this.char === CTRL_J || this.char === CTRL_M - } - - parseStart () { - if (this.char === Parser.END) { - return null - } else if (this.char === CHAR_LSQB) { - return this.call(this.parseTableOrList) - } else if (this.char === CHAR_NUM) { - return this.call(this.parseComment) - } else if (this.char === CTRL_J || this.char === CHAR_SP || this.char === CTRL_I || this.char === CTRL_M) { - return null - } else if (isAlphaNumQuoteHyphen(this.char)) { - return this.callNow(this.parseAssignStatement) - } else { - throw this.error(new TomlError(`Unknown character "${this.char}"`)) - } - } - - // HELPER, this strips any whitespace and comments to the end of the line - // then RETURNS. Last state in a production. 
- parseWhitespaceToEOL () { - if (this.char === CHAR_SP || this.char === CTRL_I || this.char === CTRL_M) { - return null - } else if (this.char === CHAR_NUM) { - return this.goto(this.parseComment) - } else if (this.char === Parser.END || this.char === CTRL_J) { - return this.return() - } else { - throw this.error(new TomlError('Unexpected character, expected only whitespace or comments till end of line')) - } - } - - /* ASSIGNMENT: key = value */ - parseAssignStatement () { - return this.callNow(this.parseAssign, this.recordAssignStatement) - } - recordAssignStatement (kv) { - let target = this.ctx - let finalKey = kv.key.pop() - for (let kw of kv.key) { - if (hasKey(target, kw) && (!isTable(target[kw]) || target[kw][_declared])) { - throw this.error(new TomlError("Can't redefine existing key")) - } - target = target[kw] = target[kw] || Table() - } - if (hasKey(target, finalKey)) { - throw this.error(new TomlError("Can't redefine existing key")) - } - // unbox our numbers - if (isInteger(kv.value) || isFloat(kv.value)) { - target[finalKey] = kv.value.valueOf() - } else { - target[finalKey] = kv.value - } - return this.goto(this.parseWhitespaceToEOL) - } - - /* ASSSIGNMENT expression, key = value possibly inside an inline table */ - parseAssign () { - return this.callNow(this.parseKeyword, this.recordAssignKeyword) - } - recordAssignKeyword (key) { - if (this.state.resultTable) { - this.state.resultTable.push(key) - } else { - this.state.resultTable = [key] - } - return this.goto(this.parseAssignKeywordPreDot) - } - parseAssignKeywordPreDot () { - if (this.char === CHAR_PERIOD) { - return this.next(this.parseAssignKeywordPostDot) - } else if (this.char !== CHAR_SP && this.char !== CTRL_I) { - return this.goto(this.parseAssignEqual) - } - } - parseAssignKeywordPostDot () { - if (this.char !== CHAR_SP && this.char !== CTRL_I) { - return this.callNow(this.parseKeyword, this.recordAssignKeyword) - } - } - - parseAssignEqual () { - if (this.char === CHAR_EQUALS) { - return this.next(this.parseAssignPreValue) - } else { - throw this.error(new TomlError('Invalid character, expected "="')) - } - } - parseAssignPreValue () { - if (this.char === CHAR_SP || this.char === CTRL_I) { - return null - } else { - return this.callNow(this.parseValue, this.recordAssignValue) - } - } - recordAssignValue (value) { - return this.returnNow({key: this.state.resultTable, value: value}) - } - - /* COMMENTS: #...eol */ - parseComment () { - do { - if (this.char === Parser.END || this.char === CTRL_J) { - return this.return() - } - } while (this.nextChar()) - } - - /* TABLES AND LISTS, [foo] and [[foo]] */ - parseTableOrList () { - if (this.char === CHAR_LSQB) { - this.next(this.parseList) - } else { - return this.goto(this.parseTable) - } - } - - /* TABLE [foo.bar.baz] */ - parseTable () { - this.ctx = this.obj - return this.goto(this.parseTableNext) - } - parseTableNext () { - if (this.char === CHAR_SP || this.char === CTRL_I) { - return null - } else { - return this.callNow(this.parseKeyword, this.parseTableMore) - } - } - parseTableMore (keyword) { - if (this.char === CHAR_SP || this.char === CTRL_I) { - return null - } else if (this.char === CHAR_RSQB) { - if (hasKey(this.ctx, keyword) && (!isTable(this.ctx[keyword]) || this.ctx[keyword][_declared])) { - throw this.error(new TomlError("Can't redefine existing key")) - } else { - this.ctx = this.ctx[keyword] = this.ctx[keyword] || Table() - this.ctx[_declared] = true - } - return this.next(this.parseWhitespaceToEOL) - } else if (this.char === CHAR_PERIOD) { - if 
(!hasKey(this.ctx, keyword)) { - this.ctx = this.ctx[keyword] = Table() - } else if (isTable(this.ctx[keyword])) { - this.ctx = this.ctx[keyword] - } else if (isList(this.ctx[keyword])) { - this.ctx = this.ctx[keyword][this.ctx[keyword].length - 1] - } else { - throw this.error(new TomlError("Can't redefine existing key")) - } - return this.next(this.parseTableNext) - } else { - throw this.error(new TomlError('Unexpected character, expected whitespace, . or ]')) - } - } - - /* LIST [[a.b.c]] */ - parseList () { - this.ctx = this.obj - return this.goto(this.parseListNext) - } - parseListNext () { - if (this.char === CHAR_SP || this.char === CTRL_I) { - return null - } else { - return this.callNow(this.parseKeyword, this.parseListMore) - } - } - parseListMore (keyword) { - if (this.char === CHAR_SP || this.char === CTRL_I) { - return null - } else if (this.char === CHAR_RSQB) { - if (!hasKey(this.ctx, keyword)) { - this.ctx[keyword] = List() - } - if (isInlineList(this.ctx[keyword])) { - throw this.error(new TomlError("Can't extend an inline array")) - } else if (isList(this.ctx[keyword])) { - const next = Table() - this.ctx[keyword].push(next) - this.ctx = next - } else { - throw this.error(new TomlError("Can't redefine an existing key")) - } - return this.next(this.parseListEnd) - } else if (this.char === CHAR_PERIOD) { - if (!hasKey(this.ctx, keyword)) { - this.ctx = this.ctx[keyword] = Table() - } else if (isInlineList(this.ctx[keyword])) { - throw this.error(new TomlError("Can't extend an inline array")) - } else if (isInlineTable(this.ctx[keyword])) { - throw this.error(new TomlError("Can't extend an inline table")) - } else if (isList(this.ctx[keyword])) { - this.ctx = this.ctx[keyword][this.ctx[keyword].length - 1] - } else if (isTable(this.ctx[keyword])) { - this.ctx = this.ctx[keyword] - } else { - throw this.error(new TomlError("Can't redefine an existing key")) - } - return this.next(this.parseListNext) - } else { - throw this.error(new TomlError('Unexpected character, expected whitespace, . or ]')) - } - } - parseListEnd (keyword) { - if (this.char === CHAR_RSQB) { - return this.next(this.parseWhitespaceToEOL) - } else { - throw this.error(new TomlError('Unexpected character, expected whitespace, . 
or ]')) - } - } - - /* VALUE string, number, boolean, inline list, inline object */ - parseValue () { - if (this.char === Parser.END) { - throw this.error(new TomlError('Key without value')) - } else if (this.char === CHAR_QUOT) { - return this.next(this.parseDoubleString) - } if (this.char === CHAR_APOS) { - return this.next(this.parseSingleString) - } else if (this.char === CHAR_HYPHEN || this.char === CHAR_PLUS) { - return this.goto(this.parseNumberSign) - } else if (this.char === CHAR_i) { - return this.next(this.parseInf) - } else if (this.char === CHAR_n) { - return this.next(this.parseNan) - } else if (isDigit(this.char)) { - return this.goto(this.parseNumberOrDateTime) - } else if (this.char === CHAR_t || this.char === CHAR_f) { - return this.goto(this.parseBoolean) - } else if (this.char === CHAR_LSQB) { - return this.call(this.parseInlineList, this.recordValue) - } else if (this.char === CHAR_LCUB) { - return this.call(this.parseInlineTable, this.recordValue) - } else { - throw this.error(new TomlError('Unexpected character, expecting string, number, datetime, boolean, inline array or inline table')) - } - } - recordValue (value) { - return this.returnNow(value) - } - - parseInf () { - if (this.char === CHAR_n) { - return this.next(this.parseInf2) - } else { - throw this.error(new TomlError('Unexpected character, expected "inf", "+inf" or "-inf"')) - } - } - parseInf2 () { - if (this.char === CHAR_f) { - if (this.state.buf === '-') { - return this.return(-Infinity) - } else { - return this.return(Infinity) - } - } else { - throw this.error(new TomlError('Unexpected character, expected "inf", "+inf" or "-inf"')) - } - } - - parseNan () { - if (this.char === CHAR_a) { - return this.next(this.parseNan2) - } else { - throw this.error(new TomlError('Unexpected character, expected "nan"')) - } - } - parseNan2 () { - if (this.char === CHAR_n) { - return this.return(NaN) - } else { - throw this.error(new TomlError('Unexpected character, expected "nan"')) - } - } - - /* KEYS, barewords or basic, literal, or dotted */ - parseKeyword () { - if (this.char === CHAR_QUOT) { - return this.next(this.parseBasicString) - } else if (this.char === CHAR_APOS) { - return this.next(this.parseLiteralString) - } else { - return this.goto(this.parseBareKey) - } - } - - /* KEYS: barewords */ - parseBareKey () { - do { - if (this.char === Parser.END) { - throw this.error(new TomlError('Key ended without value')) - } else if (isAlphaNumHyphen(this.char)) { - this.consume() - } else if (this.state.buf.length === 0) { - throw this.error(new TomlError('Empty bare keys are not allowed')) - } else { - return this.returnNow() - } - } while (this.nextChar()) - } - - /* STRINGS, single quoted (literal) */ - parseSingleString () { - if (this.char === CHAR_APOS) { - return this.next(this.parseLiteralMultiStringMaybe) - } else { - return this.goto(this.parseLiteralString) - } - } - parseLiteralString () { - do { - if (this.char === CHAR_APOS) { - return this.return() - } else if (this.atEndOfLine()) { - throw this.error(new TomlError('Unterminated string')) - } else if (this.char === CHAR_DEL || (this.char <= CTRL_CHAR_BOUNDARY && this.char !== CTRL_I)) { - throw this.errorControlCharInString() - } else { - this.consume() - } - } while (this.nextChar()) - } - parseLiteralMultiStringMaybe () { - if (this.char === CHAR_APOS) { - return this.next(this.parseLiteralMultiString) - } else { - return this.returnNow() - } - } - parseLiteralMultiString () { - if (this.char === CTRL_M) { - return null - } else if (this.char === 
CTRL_J) { - return this.next(this.parseLiteralMultiStringContent) - } else { - return this.goto(this.parseLiteralMultiStringContent) - } - } - parseLiteralMultiStringContent () { - do { - if (this.char === CHAR_APOS) { - return this.next(this.parseLiteralMultiEnd) - } else if (this.char === Parser.END) { - throw this.error(new TomlError('Unterminated multi-line string')) - } else if (this.char === CHAR_DEL || (this.char <= CTRL_CHAR_BOUNDARY && this.char !== CTRL_I && this.char !== CTRL_J && this.char !== CTRL_M)) { - throw this.errorControlCharInString() - } else { - this.consume() - } - } while (this.nextChar()) - } - parseLiteralMultiEnd () { - if (this.char === CHAR_APOS) { - return this.next(this.parseLiteralMultiEnd2) - } else { - this.state.buf += "'" - return this.goto(this.parseLiteralMultiStringContent) - } - } - parseLiteralMultiEnd2 () { - if (this.char === CHAR_APOS) { - return this.return() - } else { - this.state.buf += "''" - return this.goto(this.parseLiteralMultiStringContent) - } - } - - /* STRINGS double quoted */ - parseDoubleString () { - if (this.char === CHAR_QUOT) { - return this.next(this.parseMultiStringMaybe) - } else { - return this.goto(this.parseBasicString) - } - } - parseBasicString () { - do { - if (this.char === CHAR_BSOL) { - return this.call(this.parseEscape, this.recordEscapeReplacement) - } else if (this.char === CHAR_QUOT) { - return this.return() - } else if (this.atEndOfLine()) { - throw this.error(new TomlError('Unterminated string')) - } else if (this.char === CHAR_DEL || (this.char <= CTRL_CHAR_BOUNDARY && this.char !== CTRL_I)) { - throw this.errorControlCharInString() - } else { - this.consume() - } - } while (this.nextChar()) - } - recordEscapeReplacement (replacement) { - this.state.buf += replacement - return this.goto(this.parseBasicString) - } - parseMultiStringMaybe () { - if (this.char === CHAR_QUOT) { - return this.next(this.parseMultiString) - } else { - return this.returnNow() - } - } - parseMultiString () { - if (this.char === CTRL_M) { - return null - } else if (this.char === CTRL_J) { - return this.next(this.parseMultiStringContent) - } else { - return this.goto(this.parseMultiStringContent) - } - } - parseMultiStringContent () { - do { - if (this.char === CHAR_BSOL) { - return this.call(this.parseMultiEscape, this.recordMultiEscapeReplacement) - } else if (this.char === CHAR_QUOT) { - return this.next(this.parseMultiEnd) - } else if (this.char === Parser.END) { - throw this.error(new TomlError('Unterminated multi-line string')) - } else if (this.char === CHAR_DEL || (this.char <= CTRL_CHAR_BOUNDARY && this.char !== CTRL_I && this.char !== CTRL_J && this.char !== CTRL_M)) { - throw this.errorControlCharInString() - } else { - this.consume() - } - } while (this.nextChar()) - } - errorControlCharInString () { - let displayCode = '\\u00' - if (this.char < 16) { - displayCode += '0' - } - displayCode += this.char.toString(16) - - return this.error(new TomlError(`Control characters (codes < 0x1f and 0x7f) are not allowed in strings, use ${displayCode} instead`)) - } - recordMultiEscapeReplacement (replacement) { - this.state.buf += replacement - return this.goto(this.parseMultiStringContent) - } - parseMultiEnd () { - if (this.char === CHAR_QUOT) { - return this.next(this.parseMultiEnd2) - } else { - this.state.buf += '"' - return this.goto(this.parseMultiStringContent) - } - } - parseMultiEnd2 () { - if (this.char === CHAR_QUOT) { - return this.return() - } else { - this.state.buf += '""' - return 
this.goto(this.parseMultiStringContent) - } - } - parseMultiEscape () { - if (this.char === CTRL_M || this.char === CTRL_J) { - return this.next(this.parseMultiTrim) - } else if (this.char === CHAR_SP || this.char === CTRL_I) { - return this.next(this.parsePreMultiTrim) - } else { - return this.goto(this.parseEscape) - } - } - parsePreMultiTrim () { - if (this.char === CHAR_SP || this.char === CTRL_I) { - return null - } else if (this.char === CTRL_M || this.char === CTRL_J) { - return this.next(this.parseMultiTrim) - } else { - throw this.error(new TomlError("Can't escape whitespace")) - } - } - parseMultiTrim () { - // explicitly whitespace here, END should follow the same path as chars - if (this.char === CTRL_J || this.char === CHAR_SP || this.char === CTRL_I || this.char === CTRL_M) { - return null - } else { - return this.returnNow() - } - } - parseEscape () { - if (this.char in escapes) { - return this.return(escapes[this.char]) - } else if (this.char === CHAR_u) { - return this.call(this.parseSmallUnicode, this.parseUnicodeReturn) - } else if (this.char === CHAR_U) { - return this.call(this.parseLargeUnicode, this.parseUnicodeReturn) - } else { - throw this.error(new TomlError('Unknown escape character: ' + this.char)) - } - } - parseUnicodeReturn (char) { - try { - const codePoint = parseInt(char, 16) - if (codePoint >= SURROGATE_FIRST && codePoint <= SURROGATE_LAST) { - throw this.error(new TomlError('Invalid unicode, character in range 0xD800 - 0xDFFF is reserved')) - } - return this.returnNow(String.fromCodePoint(codePoint)) - } catch (err) { - throw this.error(TomlError.wrap(err)) - } - } - parseSmallUnicode () { - if (!isHexit(this.char)) { - throw this.error(new TomlError('Invalid character in unicode sequence, expected hex')) - } else { - this.consume() - if (this.state.buf.length >= 4) return this.return() - } - } - parseLargeUnicode () { - if (!isHexit(this.char)) { - throw this.error(new TomlError('Invalid character in unicode sequence, expected hex')) - } else { - this.consume() - if (this.state.buf.length >= 8) return this.return() - } - } - - /* NUMBERS */ - parseNumberSign () { - this.consume() - return this.next(this.parseMaybeSignedInfOrNan) - } - parseMaybeSignedInfOrNan () { - if (this.char === CHAR_i) { - return this.next(this.parseInf) - } else if (this.char === CHAR_n) { - return this.next(this.parseNan) - } else { - return this.callNow(this.parseNoUnder, this.parseNumberIntegerStart) - } - } - parseNumberIntegerStart () { - if (this.char === CHAR_0) { - this.consume() - return this.next(this.parseNumberIntegerExponentOrDecimal) - } else { - return this.goto(this.parseNumberInteger) - } - } - parseNumberIntegerExponentOrDecimal () { - if (this.char === CHAR_PERIOD) { - this.consume() - return this.call(this.parseNoUnder, this.parseNumberFloat) - } else if (this.char === CHAR_E || this.char === CHAR_e) { - this.consume() - return this.next(this.parseNumberExponentSign) - } else { - return this.returnNow(Integer(this.state.buf)) - } - } - parseNumberInteger () { - if (isDigit(this.char)) { - this.consume() - } else if (this.char === CHAR_LOWBAR) { - return this.call(this.parseNoUnder) - } else if (this.char === CHAR_E || this.char === CHAR_e) { - this.consume() - return this.next(this.parseNumberExponentSign) - } else if (this.char === CHAR_PERIOD) { - this.consume() - return this.call(this.parseNoUnder, this.parseNumberFloat) - } else { - const result = Integer(this.state.buf) - /* istanbul ignore if */ - if (result.isNaN()) { - throw this.error(new 
TomlError('Invalid number')) - } else { - return this.returnNow(result) - } - } - } - parseNoUnder () { - if (this.char === CHAR_LOWBAR || this.char === CHAR_PERIOD || this.char === CHAR_E || this.char === CHAR_e) { - throw this.error(new TomlError('Unexpected character, expected digit')) - } else if (this.atEndOfWord()) { - throw this.error(new TomlError('Incomplete number')) - } - return this.returnNow() - } - parseNoUnderHexOctBinLiteral () { - if (this.char === CHAR_LOWBAR || this.char === CHAR_PERIOD) { - throw this.error(new TomlError('Unexpected character, expected digit')) - } else if (this.atEndOfWord()) { - throw this.error(new TomlError('Incomplete number')) - } - return this.returnNow() - } - parseNumberFloat () { - if (this.char === CHAR_LOWBAR) { - return this.call(this.parseNoUnder, this.parseNumberFloat) - } else if (isDigit(this.char)) { - this.consume() - } else if (this.char === CHAR_E || this.char === CHAR_e) { - this.consume() - return this.next(this.parseNumberExponentSign) - } else { - return this.returnNow(Float(this.state.buf)) - } - } - parseNumberExponentSign () { - if (isDigit(this.char)) { - return this.goto(this.parseNumberExponent) - } else if (this.char === CHAR_HYPHEN || this.char === CHAR_PLUS) { - this.consume() - this.call(this.parseNoUnder, this.parseNumberExponent) - } else { - throw this.error(new TomlError('Unexpected character, expected -, + or digit')) - } - } - parseNumberExponent () { - if (isDigit(this.char)) { - this.consume() - } else if (this.char === CHAR_LOWBAR) { - return this.call(this.parseNoUnder) - } else { - return this.returnNow(Float(this.state.buf)) - } - } - - /* NUMBERS or DATETIMES */ - parseNumberOrDateTime () { - if (this.char === CHAR_0) { - this.consume() - return this.next(this.parseNumberBaseOrDateTime) - } else { - return this.goto(this.parseNumberOrDateTimeOnly) - } - } - parseNumberOrDateTimeOnly () { - // note, if two zeros are in a row then it MUST be a date - if (this.char === CHAR_LOWBAR) { - return this.call(this.parseNoUnder, this.parseNumberInteger) - } else if (isDigit(this.char)) { - this.consume() - if (this.state.buf.length > 4) this.next(this.parseNumberInteger) - } else if (this.char === CHAR_E || this.char === CHAR_e) { - this.consume() - return this.next(this.parseNumberExponentSign) - } else if (this.char === CHAR_PERIOD) { - this.consume() - return this.call(this.parseNoUnder, this.parseNumberFloat) - } else if (this.char === CHAR_HYPHEN) { - return this.goto(this.parseDateTime) - } else if (this.char === CHAR_COLON) { - return this.goto(this.parseOnlyTimeHour) - } else { - return this.returnNow(Integer(this.state.buf)) - } - } - parseDateTimeOnly () { - if (this.state.buf.length < 4) { - if (isDigit(this.char)) { - return this.consume() - } else if (this.char === CHAR_COLON) { - return this.goto(this.parseOnlyTimeHour) - } else { - throw this.error(new TomlError('Expected digit while parsing year part of a date')) - } - } else { - if (this.char === CHAR_HYPHEN) { - return this.goto(this.parseDateTime) - } else { - throw this.error(new TomlError('Expected hyphen (-) while parsing year part of date')) - } - } - } - parseNumberBaseOrDateTime () { - if (this.char === CHAR_b) { - this.consume() - return this.call(this.parseNoUnderHexOctBinLiteral, this.parseIntegerBin) - } else if (this.char === CHAR_o) { - this.consume() - return this.call(this.parseNoUnderHexOctBinLiteral, this.parseIntegerOct) - } else if (this.char === CHAR_x) { - this.consume() - return this.call(this.parseNoUnderHexOctBinLiteral, 
this.parseIntegerHex) - } else if (this.char === CHAR_PERIOD) { - return this.goto(this.parseNumberInteger) - } else if (isDigit(this.char)) { - return this.goto(this.parseDateTimeOnly) - } else { - return this.returnNow(Integer(this.state.buf)) - } - } - parseIntegerHex () { - if (isHexit(this.char)) { - this.consume() - } else if (this.char === CHAR_LOWBAR) { - return this.call(this.parseNoUnderHexOctBinLiteral) - } else { - const result = Integer(this.state.buf) - /* istanbul ignore if */ - if (result.isNaN()) { - throw this.error(new TomlError('Invalid number')) - } else { - return this.returnNow(result) - } - } - } - parseIntegerOct () { - if (isOctit(this.char)) { - this.consume() - } else if (this.char === CHAR_LOWBAR) { - return this.call(this.parseNoUnderHexOctBinLiteral) - } else { - const result = Integer(this.state.buf) - /* istanbul ignore if */ - if (result.isNaN()) { - throw this.error(new TomlError('Invalid number')) - } else { - return this.returnNow(result) - } - } - } - parseIntegerBin () { - if (isBit(this.char)) { - this.consume() - } else if (this.char === CHAR_LOWBAR) { - return this.call(this.parseNoUnderHexOctBinLiteral) - } else { - const result = Integer(this.state.buf) - /* istanbul ignore if */ - if (result.isNaN()) { - throw this.error(new TomlError('Invalid number')) - } else { - return this.returnNow(result) - } - } - } - - /* DATETIME */ - parseDateTime () { - // we enter here having just consumed the year and about to consume the hyphen - if (this.state.buf.length < 4) { - throw this.error(new TomlError('Years less than 1000 must be zero padded to four characters')) - } - this.state.result = this.state.buf - this.state.buf = '' - return this.next(this.parseDateMonth) - } - parseDateMonth () { - if (this.char === CHAR_HYPHEN) { - if (this.state.buf.length < 2) { - throw this.error(new TomlError('Months less than 10 must be zero padded to two characters')) - } - this.state.result += '-' + this.state.buf - this.state.buf = '' - return this.next(this.parseDateDay) - } else if (isDigit(this.char)) { - this.consume() - } else { - throw this.error(new TomlError('Incomplete datetime')) - } - } - parseDateDay () { - if (this.char === CHAR_T || this.char === CHAR_SP) { - if (this.state.buf.length < 2) { - throw this.error(new TomlError('Days less than 10 must be zero padded to two characters')) - } - this.state.result += '-' + this.state.buf - this.state.buf = '' - return this.next(this.parseStartTimeHour) - } else if (this.atEndOfWord()) { - return this.returnNow(createDate(this.state.result + '-' + this.state.buf)) - } else if (isDigit(this.char)) { - this.consume() - } else { - throw this.error(new TomlError('Incomplete datetime')) - } - } - parseStartTimeHour () { - if (this.atEndOfWord()) { - return this.returnNow(createDate(this.state.result)) - } else { - return this.goto(this.parseTimeHour) - } - } - parseTimeHour () { - if (this.char === CHAR_COLON) { - if (this.state.buf.length < 2) { - throw this.error(new TomlError('Hours less than 10 must be zero padded to two characters')) - } - this.state.result += 'T' + this.state.buf - this.state.buf = '' - return this.next(this.parseTimeMin) - } else if (isDigit(this.char)) { - this.consume() - } else { - throw this.error(new TomlError('Incomplete datetime')) - } - } - parseTimeMin () { - if (this.state.buf.length < 2 && isDigit(this.char)) { - this.consume() - } else if (this.state.buf.length === 2 && this.char === CHAR_COLON) { - this.state.result += ':' + this.state.buf - this.state.buf = '' - return 
this.next(this.parseTimeSec) - } else { - throw this.error(new TomlError('Incomplete datetime')) - } - } - parseTimeSec () { - if (isDigit(this.char)) { - this.consume() - if (this.state.buf.length === 2) { - this.state.result += ':' + this.state.buf - this.state.buf = '' - return this.next(this.parseTimeZoneOrFraction) - } - } else { - throw this.error(new TomlError('Incomplete datetime')) - } - } - - parseOnlyTimeHour () { - /* istanbul ignore else */ - if (this.char === CHAR_COLON) { - if (this.state.buf.length < 2) { - throw this.error(new TomlError('Hours less than 10 must be zero padded to two characters')) - } - this.state.result = this.state.buf - this.state.buf = '' - return this.next(this.parseOnlyTimeMin) - } else { - throw this.error(new TomlError('Incomplete time')) - } - } - parseOnlyTimeMin () { - if (this.state.buf.length < 2 && isDigit(this.char)) { - this.consume() - } else if (this.state.buf.length === 2 && this.char === CHAR_COLON) { - this.state.result += ':' + this.state.buf - this.state.buf = '' - return this.next(this.parseOnlyTimeSec) - } else { - throw this.error(new TomlError('Incomplete time')) - } - } - parseOnlyTimeSec () { - if (isDigit(this.char)) { - this.consume() - if (this.state.buf.length === 2) { - return this.next(this.parseOnlyTimeFractionMaybe) - } - } else { - throw this.error(new TomlError('Incomplete time')) - } - } - parseOnlyTimeFractionMaybe () { - this.state.result += ':' + this.state.buf - if (this.char === CHAR_PERIOD) { - this.state.buf = '' - this.next(this.parseOnlyTimeFraction) - } else { - return this.return(createTime(this.state.result)) - } - } - parseOnlyTimeFraction () { - if (isDigit(this.char)) { - this.consume() - } else if (this.atEndOfWord()) { - if (this.state.buf.length === 0) throw this.error(new TomlError('Expected digit in milliseconds')) - return this.returnNow(createTime(this.state.result + '.' 
+ this.state.buf)) - } else { - throw this.error(new TomlError('Unexpected character in datetime, expected period (.), minus (-), plus (+) or Z')) - } - } - - parseTimeZoneOrFraction () { - if (this.char === CHAR_PERIOD) { - this.consume() - this.next(this.parseDateTimeFraction) - } else if (this.char === CHAR_HYPHEN || this.char === CHAR_PLUS) { - this.consume() - this.next(this.parseTimeZoneHour) - } else if (this.char === CHAR_Z) { - this.consume() - return this.return(createDateTime(this.state.result + this.state.buf)) - } else if (this.atEndOfWord()) { - return this.returnNow(createDateTimeFloat(this.state.result + this.state.buf)) - } else { - throw this.error(new TomlError('Unexpected character in datetime, expected period (.), minus (-), plus (+) or Z')) - } - } - parseDateTimeFraction () { - if (isDigit(this.char)) { - this.consume() - } else if (this.state.buf.length === 1) { - throw this.error(new TomlError('Expected digit in milliseconds')) - } else if (this.char === CHAR_HYPHEN || this.char === CHAR_PLUS) { - this.consume() - this.next(this.parseTimeZoneHour) - } else if (this.char === CHAR_Z) { - this.consume() - return this.return(createDateTime(this.state.result + this.state.buf)) - } else if (this.atEndOfWord()) { - return this.returnNow(createDateTimeFloat(this.state.result + this.state.buf)) - } else { - throw this.error(new TomlError('Unexpected character in datetime, expected period (.), minus (-), plus (+) or Z')) - } - } - parseTimeZoneHour () { - if (isDigit(this.char)) { - this.consume() - // FIXME: No more regexps - if (/\d\d$/.test(this.state.buf)) return this.next(this.parseTimeZoneSep) - } else { - throw this.error(new TomlError('Unexpected character in datetime, expected digit')) - } - } - parseTimeZoneSep () { - if (this.char === CHAR_COLON) { - this.consume() - this.next(this.parseTimeZoneMin) - } else { - throw this.error(new TomlError('Unexpected character in datetime, expected colon')) - } - } - parseTimeZoneMin () { - if (isDigit(this.char)) { - this.consume() - if (/\d\d$/.test(this.state.buf)) return this.return(createDateTime(this.state.result + this.state.buf)) - } else { - throw this.error(new TomlError('Unexpected character in datetime, expected digit')) - } - } - - /* BOOLEAN */ - parseBoolean () { - /* istanbul ignore else */ - if (this.char === CHAR_t) { - this.consume() - return this.next(this.parseTrue_r) - } else if (this.char === CHAR_f) { - this.consume() - return this.next(this.parseFalse_a) - } - } - parseTrue_r () { - if (this.char === CHAR_r) { - this.consume() - return this.next(this.parseTrue_u) - } else { - throw this.error(new TomlError('Invalid boolean, expected true or false')) - } - } - parseTrue_u () { - if (this.char === CHAR_u) { - this.consume() - return this.next(this.parseTrue_e) - } else { - throw this.error(new TomlError('Invalid boolean, expected true or false')) - } - } - parseTrue_e () { - if (this.char === CHAR_e) { - return this.return(true) - } else { - throw this.error(new TomlError('Invalid boolean, expected true or false')) - } - } - - parseFalse_a () { - if (this.char === CHAR_a) { - this.consume() - return this.next(this.parseFalse_l) - } else { - throw this.error(new TomlError('Invalid boolean, expected true or false')) - } - } - - parseFalse_l () { - if (this.char === CHAR_l) { - this.consume() - return this.next(this.parseFalse_s) - } else { - throw this.error(new TomlError('Invalid boolean, expected true or false')) - } - } - - parseFalse_s () { - if (this.char === CHAR_s) { - this.consume() - return 
this.next(this.parseFalse_e) - } else { - throw this.error(new TomlError('Invalid boolean, expected true or false')) - } - } - - parseFalse_e () { - if (this.char === CHAR_e) { - return this.return(false) - } else { - throw this.error(new TomlError('Invalid boolean, expected true or false')) - } - } - - /* INLINE LISTS */ - parseInlineList () { - if (this.char === CHAR_SP || this.char === CTRL_I || this.char === CTRL_M || this.char === CTRL_J) { - return null - } else if (this.char === Parser.END) { - throw this.error(new TomlError('Unterminated inline array')) - } else if (this.char === CHAR_NUM) { - return this.call(this.parseComment) - } else if (this.char === CHAR_RSQB) { - return this.return(this.state.resultArr || InlineList()) - } else { - return this.callNow(this.parseValue, this.recordInlineListValue) - } - } - recordInlineListValue (value) { - if (this.state.resultArr) { - const listType = this.state.resultArr[_contentType] - const valueType = tomlType(value) - if (listType !== valueType) { - throw this.error(new TomlError(`Inline lists must be a single type, not a mix of ${listType} and ${valueType}`)) - } - } else { - this.state.resultArr = InlineList(tomlType(value)) - } - if (isFloat(value) || isInteger(value)) { - // unbox now that we've verified they're ok - this.state.resultArr.push(value.valueOf()) - } else { - this.state.resultArr.push(value) - } - return this.goto(this.parseInlineListNext) - } - parseInlineListNext () { - if (this.char === CHAR_SP || this.char === CTRL_I || this.char === CTRL_M || this.char === CTRL_J) { - return null - } else if (this.char === CHAR_NUM) { - return this.call(this.parseComment) - } else if (this.char === CHAR_COMMA) { - return this.next(this.parseInlineList) - } else if (this.char === CHAR_RSQB) { - return this.goto(this.parseInlineList) - } else { - throw this.error(new TomlError('Invalid character, expected whitespace, comma (,) or close bracket (])')) - } - } - - /* INLINE TABLE */ - parseInlineTable () { - if (this.char === CHAR_SP || this.char === CTRL_I) { - return null - } else if (this.char === Parser.END || this.char === CHAR_NUM || this.char === CTRL_J || this.char === CTRL_M) { - throw this.error(new TomlError('Unterminated inline array')) - } else if (this.char === CHAR_RCUB) { - return this.return(this.state.resultTable || InlineTable()) - } else { - if (!this.state.resultTable) this.state.resultTable = InlineTable() - return this.callNow(this.parseAssign, this.recordInlineTableValue) - } - } - recordInlineTableValue (kv) { - let target = this.state.resultTable - let finalKey = kv.key.pop() - for (let kw of kv.key) { - if (hasKey(target, kw) && (!isTable(target[kw]) || target[kw][_declared])) { - throw this.error(new TomlError("Can't redefine existing key")) - } - target = target[kw] = target[kw] || Table() - } - if (hasKey(target, finalKey)) { - throw this.error(new TomlError("Can't redefine existing key")) - } - if (isInteger(kv.value) || isFloat(kv.value)) { - target[finalKey] = kv.value.valueOf() - } else { - target[finalKey] = kv.value - } - return this.goto(this.parseInlineTableNext) - } - parseInlineTableNext () { - if (this.char === CHAR_SP || this.char === CTRL_I) { - return null - } else if (this.char === Parser.END || this.char === CHAR_NUM || this.char === CTRL_J || this.char === CTRL_M) { - throw this.error(new TomlError('Unterminated inline array')) - } else if (this.char === CHAR_COMMA) { - return this.next(this.parseInlineTable) - } else if (this.char === CHAR_RCUB) { - return this.goto(this.parseInlineTable) 
- } else { - throw this.error(new TomlError('Invalid character, expected whitespace, comma (,) or close bracket (])')) - } - } - } - return TOMLParser -} diff --git a/node_modules/@iarna/toml/package.json b/node_modules/@iarna/toml/package.json deleted file mode 100755 index 71f9e82..0000000 --- a/node_modules/@iarna/toml/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "@iarna/toml", - "version": "2.2.5", - "main": "toml.js", - "scripts": { - "test": "tap -J --100 test/*.js test/toml-stream/*.js", - "benchmark": "node benchmark.js && node benchmark-per-file.js && node results2table.js", - "prerelease": "npm t", - "prepack": "rm -f *~", - "postpublish": "git push --follow-tags", - "pretest": "iarna-standard", - "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", - "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'", - "setup-burntsushi-toml-suite": "[ -d test/burntsushi-toml-test ] || (git clone https://github.com/BurntSushi/toml-test test/burntsushi-toml-test; rimraf test/burntsushi-toml-test/.git/hooks/*); cd test/burntsushi-toml-test; git pull", - "setup-iarna-toml-suite": "[ -d test/spec-test ] || (git clone https://github.com/iarna/toml-spec-tests -b 0.5.0 test/spec-test; rimraf test/spec-test/.git/hooks/*); cd test/spec-test; git pull", - "prepare": "npm run setup-burntsushi-toml-suite && npm run setup-iarna-toml-suite" - }, - "keywords": [ - "toml", - "toml-parser", - "toml-stringifier", - "parser", - "stringifer", - "emitter", - "ini", - "tomlify", - "encoder", - "decoder" - ], - "author": "Rebecca Turner (http://re-becca.org/)", - "license": "ISC", - "description": "Better TOML parsing and stringifying all in that familiar JSON interface.", - "dependencies": {}, - "devDependencies": { - "@iarna/standard": "^2.0.2", - "@ltd/j-toml": "^0.5.107", - "@perl/qx": "^1.1.0", - "@sgarciac/bombadil": "^2.3.0", - "ansi": "^0.3.1", - "approximate-number": "^2.0.0", - "benchmark": "^2.1.4", - "fast-toml": "^0.5.4", - "funstream": "^4.2.0", - "glob": "^7.1.6", - "js-yaml": "^3.13.1", - "rimraf": "^3.0.2", - "tap": "^12.0.1", - "toml": "^3.0.0", - "toml-j0.4": "^1.1.1", - "weallbehave": "*", - "weallcontribute": "*" - }, - "files": [ - "toml.js", - "stringify.js", - "parse.js", - "parse-string.js", - "parse-stream.js", - "parse-async.js", - "parse-pretty-error.js", - "lib/parser.js", - "lib/parser-debug.js", - "lib/toml-parser.js", - "lib/create-datetime.js", - "lib/create-date.js", - "lib/create-datetime-float.js", - "lib/create-time.js", - "lib/format-num.js", - "index.d.ts" - ], - "directories": { - "test": "test" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/iarna/iarna-toml.git" - }, - "bugs": { - "url": "https://github.com/iarna/iarna-toml/issues" - }, - "homepage": "https://github.com/iarna/iarna-toml#readme" -} diff --git a/node_modules/@iarna/toml/parse-async.js b/node_modules/@iarna/toml/parse-async.js deleted file mode 100755 index e5ff090..0000000 --- a/node_modules/@iarna/toml/parse-async.js +++ /dev/null @@ -1,30 +0,0 @@ -'use strict' -module.exports = parseAsync - -const TOMLParser = require('./lib/toml-parser.js') -const prettyError = require('./parse-pretty-error.js') - -function parseAsync (str, opts) { - if (!opts) opts = {} - const index = 0 - const blocksize = opts.blocksize || 40960 - const parser = new TOMLParser() - return new Promise((resolve, reject) => { - setImmediate(parseAsyncNext, 
index, blocksize, resolve, reject) - }) - function parseAsyncNext (index, blocksize, resolve, reject) { - if (index >= str.length) { - try { - return resolve(parser.finish()) - } catch (err) { - return reject(prettyError(err, str)) - } - } - try { - parser.parse(str.slice(index, index + blocksize)) - setImmediate(parseAsyncNext, index + blocksize, blocksize, resolve, reject) - } catch (err) { - reject(prettyError(err, str)) - } - } -} diff --git a/node_modules/@iarna/toml/parse-pretty-error.js b/node_modules/@iarna/toml/parse-pretty-error.js deleted file mode 100755 index fc0d31f..0000000 --- a/node_modules/@iarna/toml/parse-pretty-error.js +++ /dev/null @@ -1,33 +0,0 @@ -'use strict' -module.exports = prettyError - -function prettyError (err, buf) { - /* istanbul ignore if */ - if (err.pos == null || err.line == null) return err - let msg = err.message - msg += ` at row ${err.line + 1}, col ${err.col + 1}, pos ${err.pos}:\n` - - /* istanbul ignore else */ - if (buf && buf.split) { - const lines = buf.split(/\n/) - const lineNumWidth = String(Math.min(lines.length, err.line + 3)).length - let linePadding = ' ' - while (linePadding.length < lineNumWidth) linePadding += ' ' - for (let ii = Math.max(0, err.line - 1); ii < Math.min(lines.length, err.line + 2); ++ii) { - let lineNum = String(ii + 1) - if (lineNum.length < lineNumWidth) lineNum = ' ' + lineNum - if (err.line === ii) { - msg += lineNum + '> ' + lines[ii] + '\n' - msg += linePadding + ' ' - for (let hh = 0; hh < err.col; ++hh) { - msg += ' ' - } - msg += '^\n' - } else { - msg += lineNum + ': ' + lines[ii] + '\n' - } - } - } - err.message = msg + '\n' - return err -} diff --git a/node_modules/@iarna/toml/parse-stream.js b/node_modules/@iarna/toml/parse-stream.js deleted file mode 100755 index fb9a644..0000000 --- a/node_modules/@iarna/toml/parse-stream.js +++ /dev/null @@ -1,80 +0,0 @@ -'use strict' -module.exports = parseStream - -const stream = require('stream') -const TOMLParser = require('./lib/toml-parser.js') - -function parseStream (stm) { - if (stm) { - return parseReadable(stm) - } else { - return parseTransform(stm) - } -} - -function parseReadable (stm) { - const parser = new TOMLParser() - stm.setEncoding('utf8') - return new Promise((resolve, reject) => { - let readable - let ended = false - let errored = false - function finish () { - ended = true - if (readable) return - try { - resolve(parser.finish()) - } catch (err) { - reject(err) - } - } - function error (err) { - errored = true - reject(err) - } - stm.once('end', finish) - stm.once('error', error) - readNext() - - function readNext () { - readable = true - let data - while ((data = stm.read()) !== null) { - try { - parser.parse(data) - } catch (err) { - return error(err) - } - } - readable = false - /* istanbul ignore if */ - if (ended) return finish() - /* istanbul ignore if */ - if (errored) return - stm.once('readable', readNext) - } - }) -} - -function parseTransform () { - const parser = new TOMLParser() - return new stream.Transform({ - objectMode: true, - transform (chunk, encoding, cb) { - try { - parser.parse(chunk.toString(encoding)) - } catch (err) { - this.emit('error', err) - } - cb() - }, - flush (cb) { - try { - this.push(parser.finish()) - } catch (err) { - this.emit('error', err) - } - cb() - } - }) -} diff --git a/node_modules/@iarna/toml/parse-string.js b/node_modules/@iarna/toml/parse-string.js deleted file mode 100755 index 84ff7d4..0000000 --- a/node_modules/@iarna/toml/parse-string.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict' 
-module.exports = parseString - -const TOMLParser = require('./lib/toml-parser.js') -const prettyError = require('./parse-pretty-error.js') - -function parseString (str) { - if (global.Buffer && global.Buffer.isBuffer(str)) { - str = str.toString('utf8') - } - const parser = new TOMLParser() - try { - parser.parse(str) - return parser.finish() - } catch (err) { - throw prettyError(err, str) - } -} diff --git a/node_modules/@iarna/toml/parse.js b/node_modules/@iarna/toml/parse.js deleted file mode 100755 index 923b9d3..0000000 --- a/node_modules/@iarna/toml/parse.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict' -module.exports = require('./parse-string.js') -module.exports.async = require('./parse-async.js') -module.exports.stream = require('./parse-stream.js') -module.exports.prettyError = require('./parse-pretty-error.js') diff --git a/node_modules/@iarna/toml/stringify.js b/node_modules/@iarna/toml/stringify.js deleted file mode 100755 index 958caae..0000000 --- a/node_modules/@iarna/toml/stringify.js +++ /dev/null @@ -1,296 +0,0 @@ -'use strict' -module.exports = stringify -module.exports.value = stringifyInline - -function stringify (obj) { - if (obj === null) throw typeError('null') - if (obj === void (0)) throw typeError('undefined') - if (typeof obj !== 'object') throw typeError(typeof obj) - - if (typeof obj.toJSON === 'function') obj = obj.toJSON() - if (obj == null) return null - const type = tomlType(obj) - if (type !== 'table') throw typeError(type) - return stringifyObject('', '', obj) -} - -function typeError (type) { - return new Error('Can only stringify objects, not ' + type) -} - -function arrayOneTypeError () { - return new Error("Array values can't have mixed types") -} - -function getInlineKeys (obj) { - return Object.keys(obj).filter(key => isInline(obj[key])) -} -function getComplexKeys (obj) { - return Object.keys(obj).filter(key => !isInline(obj[key])) -} - -function toJSON (obj) { - let nobj = Array.isArray(obj) ? [] : Object.prototype.hasOwnProperty.call(obj, '__proto__') ? {['__proto__']: undefined} : {} - for (let prop of Object.keys(obj)) { - if (obj[prop] && typeof obj[prop].toJSON === 'function' && !('toISOString' in obj[prop])) { - nobj[prop] = obj[prop].toJSON() - } else { - nobj[prop] = obj[prop] - } - } - return nobj -} - -function stringifyObject (prefix, indent, obj) { - obj = toJSON(obj) - var inlineKeys - var complexKeys - inlineKeys = getInlineKeys(obj) - complexKeys = getComplexKeys(obj) - var result = [] - var inlineIndent = indent || '' - inlineKeys.forEach(key => { - var type = tomlType(obj[key]) - if (type !== 'undefined' && type !== 'null') { - result.push(inlineIndent + stringifyKey(key) + ' = ' + stringifyAnyInline(obj[key], true)) - } - }) - if (result.length > 0) result.push('') - var complexIndent = prefix && inlineKeys.length > 0 ? 
indent + ' ' : '' - complexKeys.forEach(key => { - result.push(stringifyComplex(prefix, complexIndent, key, obj[key])) - }) - return result.join('\n') -} - -function isInline (value) { - switch (tomlType(value)) { - case 'undefined': - case 'null': - case 'integer': - case 'nan': - case 'float': - case 'boolean': - case 'string': - case 'datetime': - return true - case 'array': - return value.length === 0 || tomlType(value[0]) !== 'table' - case 'table': - return Object.keys(value).length === 0 - /* istanbul ignore next */ - default: - return false - } -} - -function tomlType (value) { - if (value === undefined) { - return 'undefined' - } else if (value === null) { - return 'null' - /* eslint-disable valid-typeof */ - } else if (typeof value === 'bigint' || (Number.isInteger(value) && !Object.is(value, -0))) { - return 'integer' - } else if (typeof value === 'number') { - return 'float' - } else if (typeof value === 'boolean') { - return 'boolean' - } else if (typeof value === 'string') { - return 'string' - } else if ('toISOString' in value) { - return isNaN(value) ? 'undefined' : 'datetime' - } else if (Array.isArray(value)) { - return 'array' - } else { - return 'table' - } -} - -function stringifyKey (key) { - var keyStr = String(key) - if (/^[-A-Za-z0-9_]+$/.test(keyStr)) { - return keyStr - } else { - return stringifyBasicString(keyStr) - } -} - -function stringifyBasicString (str) { - return '"' + escapeString(str).replace(/"/g, '\\"') + '"' -} - -function stringifyLiteralString (str) { - return "'" + str + "'" -} - -function numpad (num, str) { - while (str.length < num) str = '0' + str - return str -} - -function escapeString (str) { - return str.replace(/\\/g, '\\\\') - .replace(/[\b]/g, '\\b') - .replace(/\t/g, '\\t') - .replace(/\n/g, '\\n') - .replace(/\f/g, '\\f') - .replace(/\r/g, '\\r') - /* eslint-disable no-control-regex */ - .replace(/([\u0000-\u001f\u007f])/, c => '\\u' + numpad(4, c.codePointAt(0).toString(16))) - /* eslint-enable no-control-regex */ -} - -function stringifyMultilineString (str) { - let escaped = str.split(/\n/).map(str => { - return escapeString(str).replace(/"(?="")/g, '\\"') - }).join('\n') - if (escaped.slice(-1) === '"') escaped += '\\\n' - return '"""\n' + escaped + '"""' -} - -function stringifyAnyInline (value, multilineOk) { - let type = tomlType(value) - if (type === 'string') { - if (multilineOk && /\n/.test(value)) { - type = 'string-multiline' - } else if (!/[\b\t\n\f\r']/.test(value) && /"/.test(value)) { - type = 'string-literal' - } - } - return stringifyInline(value, type) -} - -function stringifyInline (value, type) { - /* istanbul ignore if */ - if (!type) type = tomlType(value) - switch (type) { - case 'string-multiline': - return stringifyMultilineString(value) - case 'string': - return stringifyBasicString(value) - case 'string-literal': - return stringifyLiteralString(value) - case 'integer': - return stringifyInteger(value) - case 'float': - return stringifyFloat(value) - case 'boolean': - return stringifyBoolean(value) - case 'datetime': - return stringifyDatetime(value) - case 'array': - return stringifyInlineArray(value.filter(_ => tomlType(_) !== 'null' && tomlType(_) !== 'undefined' && tomlType(_) !== 'nan')) - case 'table': - return stringifyInlineTable(value) - /* istanbul ignore next */ - default: - throw typeError(type) - } -} - -function stringifyInteger (value) { - /* eslint-disable security/detect-unsafe-regex */ - return String(value).replace(/\B(?=(\d{3})+(?!\d))/g, '_') -} - -function stringifyFloat (value) { - if 
(value === Infinity) { - return 'inf' - } else if (value === -Infinity) { - return '-inf' - } else if (Object.is(value, NaN)) { - return 'nan' - } else if (Object.is(value, -0)) { - return '-0.0' - } - var chunks = String(value).split('.') - var int = chunks[0] - var dec = chunks[1] || 0 - return stringifyInteger(int) + '.' + dec -} - -function stringifyBoolean (value) { - return String(value) -} - -function stringifyDatetime (value) { - return value.toISOString() -} - -function isNumber (type) { - return type === 'float' || type === 'integer' -} -function arrayType (values) { - var contentType = tomlType(values[0]) - if (values.every(_ => tomlType(_) === contentType)) return contentType - // mixed integer/float, emit as floats - if (values.every(_ => isNumber(tomlType(_)))) return 'float' - return 'mixed' -} -function validateArray (values) { - const type = arrayType(values) - if (type === 'mixed') { - throw arrayOneTypeError() - } - return type -} - -function stringifyInlineArray (values) { - values = toJSON(values) - const type = validateArray(values) - var result = '[' - var stringified = values.map(_ => stringifyInline(_, type)) - if (stringified.join(', ').length > 60 || /\n/.test(stringified)) { - result += '\n ' + stringified.join(',\n ') + '\n' - } else { - result += ' ' + stringified.join(', ') + (stringified.length > 0 ? ' ' : '') - } - return result + ']' -} - -function stringifyInlineTable (value) { - value = toJSON(value) - var result = [] - Object.keys(value).forEach(key => { - result.push(stringifyKey(key) + ' = ' + stringifyAnyInline(value[key], false)) - }) - return '{ ' + result.join(', ') + (result.length > 0 ? ' ' : '') + '}' -} - -function stringifyComplex (prefix, indent, key, value) { - var valueType = tomlType(value) - /* istanbul ignore else */ - if (valueType === 'array') { - return stringifyArrayOfTables(prefix, indent, key, value) - } else if (valueType === 'table') { - return stringifyComplexTable(prefix, indent, key, value) - } else { - throw typeError(valueType) - } -} - -function stringifyArrayOfTables (prefix, indent, key, values) { - values = toJSON(values) - validateArray(values) - var firstValueType = tomlType(values[0]) - /* istanbul ignore if */ - if (firstValueType !== 'table') throw typeError(firstValueType) - var fullKey = prefix + stringifyKey(key) - var result = '' - values.forEach(table => { - if (result.length > 0) result += '\n' - result += indent + '[[' + fullKey + ']]\n' - result += stringifyObject(fullKey + '.', indent, table) - }) - return result -} - -function stringifyComplexTable (prefix, indent, key, value) { - var fullKey = prefix + stringifyKey(key) - var result = '' - if (getInlineKeys(value).length > 0) { - result += indent + '[' + fullKey + ']\n' - } - return result + stringifyObject(fullKey + '.', indent, value) -} diff --git a/node_modules/@iarna/toml/toml.js b/node_modules/@iarna/toml/toml.js deleted file mode 100755 index edca17c..0000000 --- a/node_modules/@iarna/toml/toml.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' -exports.parse = require('./parse.js') -exports.stringify = require('./stringify.js') diff --git a/node_modules/toml-patch/LICENSE b/node_modules/toml-patch/LICENSE new file mode 100644 index 0000000..aef843e --- /dev/null +++ b/node_modules/toml-patch/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Tim Hall + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without 
restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/toml-patch/README.md b/node_modules/toml-patch/README.md new file mode 100644 index 0000000..4131a77 --- /dev/null +++ b/node_modules/toml-patch/README.md @@ -0,0 +1,104 @@ +# toml-patch + +Patch, parse, and stringify TOML. + +## Installation + +toml-patch is dependency-free and can be installed via npm or yarn. + +``` +$ npm install --save toml-patch +``` + +For browser usage, you can use unpkg: + +```html +<script src="https://unpkg.com/toml-patch"></script> +``` + +## API + +### patch(existing, updated) + +Patch an existing TOML string with the given updated JS/JSON value, while attempting to retain the format of the existing document, including comments, indentation, and structure. + +```js +const TOML = require('toml-patch'); +const assert = require('assert'); + +const existing = ` +# This is a TOML document + +title = "TOML example" +owner.name = "Bob" +` +const patched = TOML.patch(existing, { + title: 'TOML example', + owner: { + name: 'Tim' + } +}); + +assert.strictEqual(patched, ` +# This is a TOML document + +title = "TOML example" +owner.name = "Tim" +`); +``` + +### parse(value) + +Parse a TOML string into a JS/JSON value. + +```js +const TOML = require('toml-patch'); +const assert = require('assert'); + +const parsed = TOML.parse(` +# This is a TOML document. + +title = "TOML Example" + +[owner] +name = "Tim"`); + +assert.deepStrictEqual(parsed, { + title: 'TOML Example', + owner: { + name: 'Tim' + } +}); +``` + +### stringify(value[, options]) + +Convert a JS/JSON value to a TOML string. `options` can be provided for high-level formatting guidelines that follow prettier's configuration.
+ +options + +- `[printWidth = 80]` - (coming soon) +- `[trailingComma = false]` - Add trailing comma to inline tables +- `[bracketSpacing = true]` - `true`: `{ key = "value" }`, `false`: `{key = "value"}` + +```js +const TOML = require('toml-patch'); +const assert = require('assert'); + +const toml = TOML.stringify({ + title: 'TOML Example', + owner: { + name: 'Tim' + } +}); + +assert.strictEqual(toml, +`title = "TOML Example" + +[owner] +name = "Tim"`); +``` + +## Development + +[![Build Status](https://dev.azure.com/timhallengr/toml-patch/_apis/build/status/timhall.toml-patch?branchName=master)](https://dev.azure.com/timhallengr/toml-patch/_build/latest?definitionId=1&branchName=master) diff --git a/node_modules/toml-patch/dist/toml-patch.cjs.min.js b/node_modules/toml-patch/dist/toml-patch.cjs.min.js new file mode 100644 index 0000000..09da76c --- /dev/null +++ b/node_modules/toml-patch/dist/toml-patch.cjs.min.js @@ -0,0 +1,2 @@ +"use strict";var NodeType,TokenType;function isDocument(e){return e.type===NodeType.Document}function isTable(e){return e.type===NodeType.Table}function isTableKey(e){return e.type===NodeType.TableKey}function isTableArray(e){return e.type===NodeType.TableArray}function isTableArrayKey(e){return e.type===NodeType.TableArrayKey}function isKeyValue(e){return e.type===NodeType.KeyValue}function isInlineArray(e){return e.type===NodeType.InlineArray}function isInlineItem(e){return e.type===NodeType.InlineItem}function isInlineTable(e){return e.type===NodeType.InlineTable}function isComment(e){return e.type===NodeType.Comment}function hasItems(e){return isDocument(e)||isTable(e)||isTableArray(e)||isInlineTable(e)||isInlineArray(e)}function hasItem(e){return isTableKey(e)||isTableArrayKey(e)||isInlineItem(e)}function isBlock(e){return isKeyValue(e)||isTable(e)||isTableArray(e)||isComment(e)}function iterator(e){return e[Symbol.iterator]()}Object.defineProperty(exports,"__esModule",{value:!0}),function(e){e.Document="Document",e.Table="Table",e.TableKey="TableKey",e.TableArray="TableArray",e.TableArrayKey="TableArrayKey",e.KeyValue="KeyValue",e.Key="Key",e.String="String",e.Integer="Integer",e.Float="Float",e.Boolean="Boolean",e.DateTime="DateTime",e.InlineArray="InlineArray",e.InlineItem="InlineItem",e.InlineTable="InlineTable",e.Comment="Comment"}(NodeType||(NodeType={}));class Cursor{constructor(e){this.iterator=e,this.index=-1,this.value=void 0,this.done=!1,this.peeked=null}next(){if(this.done)return done();const e=this.peeked||this.iterator.next();return this.index+=1,this.value=e.value,this.done=e.done,this.peeked=null,e}peek(){return this.done?done():this.peeked?this.peeked:(this.peeked=this.iterator.next(),this.peeked)}[Symbol.iterator](){return this}}function done(){return{value:void 0,done:!0}}function getSpan(e){return{lines:e.end.line-e.start.line+1,columns:e.end.column-e.start.column}}function createLocate(e){const t=findLines(e);return(e,n)=>({start:findPosition(t,e),end:findPosition(t,n)})}function findPosition(e,t){const n=Array.isArray(e)?e:findLines(e),r=n.findIndex(e=>e>=t)+1;return{line:r,column:t-(n[r-2]+1||0)}}function getLine(e,t){const n=findLines(e),r=n[t.line-2]||0,o=n[t.line-1]||e.length;return e.substr(r,o-r)}function findLines(e){const t=/[\r\n|\n]/g,n=[];let r;for(;null!=(r=t.exec(e));)n.push(r.index);return n.push(e.length+1),n}function clonePosition(e){return{line:e.line,column:e.column}}function cloneLocation(e){return{start:clonePosition(e.start),end:clonePosition(e.end)}}function zero(){return{line:1,column:0}}class ParseError 
extends Error{constructor(e,t,n){let r=`Error parsing TOML (${t.line}, ${t.column+1}):\n`;if(e){const n=getLine(e,t),o=`${whitespace(t.column)}^`;n&&(r+=`${n}\n${o}\n`)}super(r+=n),this.line=t.line,this.column=t.column}}function whitespace(e,t=" "){return t.repeat(e)}!function(e){e.Bracket="Bracket",e.Curly="Curly",e.Equal="Equal",e.Comma="Comma",e.Dot="Dot",e.Comment="Comment",e.Literal="Literal"}(TokenType||(TokenType={}));const IS_WHITESPACE=/\s/,IS_NEW_LINE=/(\r\n|\n)/,DOUBLE_QUOTE='"',SINGLE_QUOTE="'",SPACE=" ",ESCAPE="\\",IS_VALID_LEADING_CHARACTER=/[\w,\d,\",\',\+,\-,\_]/;function*tokenize(e){const t=new Cursor(iterator(e));t.next();const n=createLocate(e);for(;!t.done;){if(IS_WHITESPACE.test(t.value));else if("["===t.value||"]"===t.value)yield specialCharacter(t,n,TokenType.Bracket);else if("{"===t.value||"}"===t.value)yield specialCharacter(t,n,TokenType.Curly);else if("="===t.value)yield specialCharacter(t,n,TokenType.Equal);else if(","===t.value)yield specialCharacter(t,n,TokenType.Comma);else if("."===t.value)yield specialCharacter(t,n,TokenType.Dot);else if("#"===t.value)yield comment(t,n);else{const r=checkThree(e,t.index,SINGLE_QUOTE)||checkThree(e,t.index,DOUBLE_QUOTE);r?yield multiline(t,n,r,e):yield string(t,n,e)}t.next()}}function specialCharacter(e,t,n){return{type:n,raw:e.value,loc:t(e.index,e.index+1)}}function comment(e,t){const n=e.index;let r=e.value;for(;!e.peek().done&&!IS_NEW_LINE.test(e.peek().value);)e.next(),r+=e.value;return{type:TokenType.Comment,raw:r,loc:t(n,e.index+1)}}function multiline(e,t,n,r){const o=e.index;let a=n+n+n,l=a;for(e.next(),e.next(),e.next();!e.done&&!checkThree(r,e.index,n);)l+=e.value,e.next();if(e.done)throw new ParseError(r,findPosition(r,e.index),`Expected close of multiline string with ${a}, reached end of file`);return l+=a,e.next(),e.next(),{type:TokenType.Literal,raw:l,loc:t(o,e.index+1)}}function string(e,t,n){if(!IS_VALID_LEADING_CHARACTER.test(e.value))throw new ParseError(n,findPosition(n,e.index),`Unsupported character "${e.value}". 
Expected ALPHANUMERIC, ", ', +, -, or _`);const r=e.index;let o=e.value,a=e.value===DOUBLE_QUOTE,l=e.value===SINGLE_QUOTE;const i=e=>{if(e.peek().done)return!0;const t=e.peek().value;return!(a||l)&&(IS_WHITESPACE.test(t)||","===t||"."===t||"]"===t||"}"===t||"="===t)};for(;!e.done&&!i(e)&&(e.next(),e.value===DOUBLE_QUOTE&&(a=!a),e.value!==SINGLE_QUOTE||a||(l=!l),o+=e.value,!e.peek().done);){let t=e.peek().value;a&&e.value===ESCAPE&&(t===DOUBLE_QUOTE?(o+=DOUBLE_QUOTE,e.next()):t===ESCAPE&&(o+=ESCAPE,e.next()))}if(a||l)throw new ParseError(n,findPosition(n,r),`Expected close of string with ${a?DOUBLE_QUOTE:SINGLE_QUOTE}`);return{type:TokenType.Literal,raw:o,loc:t(r,e.index+1)}}function checkThree(e,t,n){return e[t]===n&&e[t+1]===n&&e[t+2]===n&&n}function last(e){return e[e.length-1]}function blank(){return Object.create(null)}function isString(e){return"string"==typeof e}function isInteger(e){return"number"==typeof e&&e%1==0}function isFloat(e){return"number"==typeof e&&!isInteger(e)}function isBoolean(e){return"boolean"==typeof e}function isDate(e){return"[object Date]"===Object.prototype.toString.call(e)}function isObject(e){return e&&"object"==typeof e&&!isDate(e)&&!Array.isArray(e)}function isIterable(e){return null!=e&&"function"==typeof e[Symbol.iterator]}function has(e,t){return Object.prototype.hasOwnProperty.call(e,t)}function arraysEqual(e,t){if(e.length!==t.length)return!1;for(let n=0;nt(e),e)}function stableStringify(e){if(isObject(e)){return`{${Object.keys(e).sort().map(t=>`${JSON.stringify(t)}:${stableStringify(e[t])}`).join(",")}}`}return Array.isArray(e)?`[${e.map(stableStringify).join(",")}]`:JSON.stringify(e)}function merge(e,t){const n=e.length,r=t.length;e.length=n+r;for(let o=0;o{const t=parseInt(e.replace("\\U",""),16),n=String.fromCodePoint(t);return trim(JSON.stringify(n),1)});return JSON.parse(`"${t}"`)}function trim(e,t){return e.substr(t,e.length-2*t)}function trimLeadingWhitespace(e){return IS_LEADING_NEW_LINE.test(e)?e.substr(1):e}function escapeNewLines(e){return e.replace(IS_CRLF,CRLF).replace(IS_LF,LF)}function lineEndingBackslash(e){return e.replace(IS_LINE_ENDING_BACKSLASH,"")}const TRUE="true",FALSE="false",HAS_E=/e/i,IS_DIVIDER=/\_/g,IS_INF=/inf/,IS_NAN=/nan/,IS_HEX=/^0x/,IS_OCTAL=/^0o/,IS_BINARY=/^0b/,IS_FULL_DATE=/(\d{4})-(\d{2})-(\d{2})/,IS_FULL_TIME=/(\d{2}):(\d{2}):(\d{2})/;function*parseTOML(e){const t=tokenize(e),n=new Cursor(t);for(;!n.next().done;)yield*walkBlock(n,e)}function*walkBlock(e,t){if(e.value.type===TokenType.Comment)yield comment$1(e);else if(e.value.type===TokenType.Bracket)yield table(e,t);else{if(e.value.type!==TokenType.Literal)throw new ParseError(t,e.value.loc.start,`Unexpected token "${e.value.type}". Expected Comment, Bracket, or String`);yield*keyValue(e,t)}}function*walkValue(e,t){if(e.value.type===TokenType.Literal)e.value.raw[0]===DOUBLE_QUOTE||e.value.raw[0]===SINGLE_QUOTE?yield string$1(e):e.value.raw===TRUE||e.value.raw===FALSE?yield boolean(e):IS_FULL_DATE.test(e.value.raw)||IS_FULL_TIME.test(e.value.raw)?yield datetime(e,t):!e.peek().done&&e.peek().value.type===TokenType.Dot||IS_INF.test(e.value.raw)||IS_NAN.test(e.value.raw)||HAS_E.test(e.value.raw)&&!IS_HEX.test(e.value.raw)?yield float(e,t):yield integer(e);else if(e.value.type===TokenType.Curly)yield inlineTable(e,t);else{if(e.value.type!==TokenType.Bracket)throw new ParseError(t,e.value.loc.start,`Unrecognized token type "${e.value.type}". 
Expected String, Curly, or Bracket`);{const[n,r]=inlineArray(e,t);yield n,yield*r}}}function comment$1(e){return{type:NodeType.Comment,loc:e.value.loc,raw:e.value.raw}}function table(e,t){const n=e.peek().done||e.peek().value.type!==TokenType.Bracket?NodeType.Table:NodeType.TableArray,r=n===NodeType.Table;if(r&&"["!==e.value.raw)throw new ParseError(t,e.value.loc.start,`Expected table opening "[", found ${e.value.raw}`);if(!r&&("["!==e.value.raw||"["!==e.peek().value.raw))throw new ParseError(t,e.value.loc.start,`Expected array of tables opening "[[", found ${e.value.raw+e.peek().value.raw}`);const o=r?{type:NodeType.TableKey,loc:e.value.loc}:{type:NodeType.TableArrayKey,loc:e.value.loc};if(e.next(),n===NodeType.TableArray&&e.next(),e.done)throw new ParseError(t,o.loc.start,"Expected table key, reached end of file");for(o.item={type:NodeType.Key,loc:cloneLocation(e.value.loc),raw:e.value.raw,value:[parseString(e.value.raw)]};!e.peek().done&&e.peek().value.type===TokenType.Dot;){e.next();const t=e.value;e.next();const n=" ".repeat(t.loc.start.column-o.item.loc.end.column),r=" ".repeat(e.value.loc.start.column-t.loc.end.column);o.item.loc.end=e.value.loc.end,o.item.raw+=`${n}.${r}${e.value.raw}`,o.item.value.push(parseString(e.value.raw))}if(e.next(),r&&(e.done||"]"!==e.value.raw))throw new ParseError(t,e.done?o.item.loc.end:e.value.loc.start,`Expected table closing "]", found ${e.done?"end of file":e.value.raw}`);if(!r&&(e.done||e.peek().done||"]"!==e.value.raw||"]"!==e.peek().value.raw))throw new ParseError(t,e.done||e.peek().done?o.item.loc.end:e.value.loc.start,`Expected array of tables closing "]]", found ${e.done||e.peek().done?"end of file":e.value.raw+e.peek().value.raw}`);r||e.next(),o.loc.end=e.value.loc.end;let a=[];for(;!e.peek().done&&e.peek().value.type!==TokenType.Bracket;)e.next(),merge(a,[...walkBlock(e,t)]);return{type:r?NodeType.Table:NodeType.TableArray,loc:{start:clonePosition(o.loc.start),end:clonePosition(a.length?a[a.length-1].loc.end:o.loc.end)},key:o,items:a}}function keyValue(e,t){const n={type:NodeType.Key,loc:cloneLocation(e.value.loc),raw:e.value.raw,value:[parseString(e.value.raw)]};for(;!e.peek().done&&e.peek().value.type===TokenType.Dot;)e.next(),e.next(),n.loc.end=e.value.loc.end,n.raw+=`.${e.value.raw}`,n.value.push(parseString(e.value.raw));if(e.next(),e.done||e.value.type!==TokenType.Equal)throw new ParseError(t,e.done?n.loc.end:e.value.loc.start,`Expected "=" for key-value, found ${e.done?"end of file":e.value.raw}`);const r=e.value.loc.start.column;if(e.next(),e.done)throw new ParseError(t,n.loc.start,"Expected value for key-value, reached end of file");const[o,...a]=walkValue(e,t);return[{type:NodeType.KeyValue,key:n,value:o,loc:{start:clonePosition(n.loc.start),end:clonePosition(o.loc.end)},equals:r},...a]}function string$1(e){return{type:NodeType.String,loc:e.value.loc,raw:e.value.raw,value:parseString(e.value.raw)}}function boolean(e){return{type:NodeType.Boolean,loc:e.value.loc,value:e.value.raw===TRUE}}function datetime(e,t){let n,r=e.value.loc,o=e.value.raw;if(!e.peek().done&&e.peek().value.type===TokenType.Literal&&IS_FULL_DATE.test(o)&&IS_FULL_TIME.test(e.peek().value.raw)){const t=r.start;e.next(),r={start:t,end:e.value.loc.end},o+=` ${e.value.raw}`}if(!e.peek().done&&e.peek().value.type===TokenType.Dot){const n=r.start;if(e.next(),e.peek().done||e.peek().value.type!==TokenType.Literal)throw new ParseError(t,e.value.loc.end,"Expected fractional value for 
DateTime");e.next(),r={start:n,end:e.value.loc.end},o+=`.${e.value.raw}`}if(IS_FULL_DATE.test(o))n=new Date(o.replace(" ","T"));else{const[e]=(new Date).toISOString().split("T");n=new Date(`${e}T${o}`)}return{type:NodeType.DateTime,loc:r,raw:o,value:n}}function float(e,t){let n,r=e.value.loc,o=e.value.raw;if(IS_INF.test(o))n="-inf"===o?-1/0:1/0;else if(IS_NAN.test(o))n=NaN;else if(e.peek().done||e.peek().value.type!==TokenType.Dot)n=Number(o.replace(IS_DIVIDER,""));else{const a=r.start;if(e.next(),e.peek().done||e.peek().value.type!==TokenType.Literal)throw new ParseError(t,e.value.loc.end,"Expected fraction value for Float");e.next(),o+=`.${e.value.raw}`,r={start:a,end:e.value.loc.end},n=Number(o.replace(IS_DIVIDER,""))}return{type:NodeType.Float,loc:r,raw:o,value:n}}function integer(e){if("-0"===e.value.raw||"+0"===e.value.raw)return{type:NodeType.Integer,loc:e.value.loc,raw:e.value.raw,value:0};let t=10;IS_HEX.test(e.value.raw)?t=16:IS_OCTAL.test(e.value.raw)?t=8:IS_BINARY.test(e.value.raw)&&(t=2);const n=parseInt(e.value.raw.replace(IS_DIVIDER,"").replace(IS_OCTAL,"").replace(IS_BINARY,""),t);return{type:NodeType.Integer,loc:e.value.loc,raw:e.value.raw,value:n}}function inlineTable(e,t){if("{"!==e.value.raw)throw new ParseError(t,e.value.loc.start,`Expected "{" for inline table, found ${e.value.raw}`);const n={type:NodeType.InlineTable,loc:cloneLocation(e.value.loc),items:[]};for(e.next();!e.done&&(e.value.type!==TokenType.Curly||"}"!==e.value.raw);){if(e.value.type===TokenType.Comma){const r=n.items[n.items.length-1];if(!r)throw new ParseError(t,e.value.loc.start,'Found "," without previous value in inline table');r.comma=!0,r.loc.end=e.value.loc.start,e.next();continue}const[r]=walkBlock(e,t);if(r.type!==NodeType.KeyValue)throw new ParseError(t,e.value.loc.start,`Only key-values are supported in inline tables, found ${r.type}`);const o={type:NodeType.InlineItem,loc:cloneLocation(r.loc),item:r,comma:!1};n.items.push(o),e.next()}if(e.done||e.value.type!==TokenType.Curly||"}"!==e.value.raw)throw new ParseError(t,e.done?n.loc.start:e.value.loc.start,`Expected "}", found ${e.done?"end of file":e.value.raw}`);return n.loc.end=e.value.loc.end,n}function inlineArray(e,t){if("["!==e.value.raw)throw new ParseError(t,e.value.loc.start,`Expected "[" for inline array, found ${e.value.raw}`);const n={type:NodeType.InlineArray,loc:cloneLocation(e.value.loc),items:[]};let r=[];for(e.next();!e.done&&(e.value.type!==TokenType.Bracket||"]"!==e.value.raw);){if(e.value.type===TokenType.Comma){const r=n.items[n.items.length-1];if(!r)throw new ParseError(t,e.value.loc.start,'Found "," without previous value for inline array');r.comma=!0,r.loc.end=e.value.loc.start}else if(e.value.type===TokenType.Comment)r.push(comment$1(e));else{const[o,...a]=walkValue(e,t),l={type:NodeType.InlineItem,loc:cloneLocation(o.loc),item:o,comma:!1};n.items.push(l),merge(r,a)}e.next()}if(e.done||e.value.type!==TokenType.Bracket||"]"!==e.value.raw)throw new ParseError(t,e.done?n.loc.start:e.value.loc.start,`Expected "]", found ${e.done?"end of file":e.value.raw}`);return n.loc.end=e.value.loc.end,[n,r]}function traverse(e,t){function n(e,t){for(const n of e)r(n,t)}function r(e,o){const a=t[e.type];switch(a&&"function"==typeof a&&a(e,o),a&&a.enter&&a.enter(e,o),e.type){case NodeType.Document:n(e.items,e);break;case NodeType.Table:r(e.key,e),n(e.items,e);break;case NodeType.TableKey:r(e.item,e);break;case NodeType.TableArray:r(e.key,e),n(e.items,e);break;case NodeType.TableArrayKey:r(e.item,e);break;case 
NodeType.KeyValue:r(e.key,e),r(e.value,e);break;case NodeType.InlineArray:n(e.items,e);break;case NodeType.InlineItem:r(e.item,e);break;case NodeType.InlineTable:n(e.items,e);break;case NodeType.Key:case NodeType.String:case NodeType.Integer:case NodeType.Float:case NodeType.Boolean:case NodeType.DateTime:case NodeType.Comment:break;default:throw new Error(`Unrecognized node type "${e.type}"`)}a&&a.exit&&a.exit(e,o)}isIterable(e)?n(e,null):r(e,null)}const enter_offsets=new WeakMap,getEnter=e=>(enter_offsets.has(e)||enter_offsets.set(e,new WeakMap),enter_offsets.get(e)),exit_offsets=new WeakMap,getExit=e=>(exit_offsets.has(e)||exit_offsets.set(e,new WeakMap),exit_offsets.get(e));function replace(e,t,n,r){if(hasItems(t)){const e=t.items.indexOf(n);if(e<0)throw new Error("Could not find existing item in parent node for replace");t.items.splice(e,1,r)}else if(hasItem(t))t.item=r;else{if(!isKeyValue(t))throw new Error(`Unsupported parent type "${t.type}" for replace`);t.key===n?t.key=r:t.value=r}shiftNode(r,{lines:n.loc.start.line-r.loc.start.line,columns:n.loc.start.column-r.loc.start.column});const o=getSpan(n.loc),a=getSpan(r.loc);addOffset({lines:a.lines-o.lines,columns:a.columns-o.columns},getExit(e),r,n)}function insert(e,t,n,r){if(!hasItems(t))throw new Error(`Unsupported parent type "${t.type}" for insert`);let o,a;r=null!=r?r:t.items.length,isInlineArray(t)||isInlineTable(t)?({shift:o,offset:a}=insertInline(t,n,r)):({shift:o,offset:a}=insertOnNewLine(t,n,r)),shiftNode(n,o);const l=t.items[r-1],i=l&&getExit(e).get(l);i&&(a.lines+=i.lines,a.columns+=i.columns,isInlineItem(n)&&l&&t.items[r+1]&&(a.columns-=2),getExit(e).delete(l)),getExit(e).set(n,a)}function insertOnNewLine(e,t,n){if(!isBlock(t))throw new Error(`Incompatible child type "${t.type}"`);const r=e.items[n-1],o=isDocument(e)&&!e.items.length;e.items.splice(n,0,t);const a=r?{line:r.loc.end.line,column:isComment(r)?e.loc.start.column:r.loc.start.column}:clonePosition(e.loc.start),l=isTable(t)||isTableArray(t);let i=0;o||(i=l?2:1),a.line+=i;const s={lines:a.line-t.loc.start.line,columns:a.column-t.loc.start.column},c=getSpan(t.loc);return{shift:s,offset:{lines:c.lines+(i-1),columns:c.columns}}}function insertInline(e,t,n){if(!isInlineItem(t))throw new Error(`Incompatible child type "${t.type}"`);const r=null!=n?e.items[n-1]:last(e.items),o=null==n||n===e.items.length;e.items.splice(n,0,t);const a=!!r,l=!o,i=o&&!0===t.comma;a&&(r.comma=!0),l&&(t.comma=!0);const s=isInlineArray(e)&&perLine(e),c=r?{line:r.loc.end.line,column:s?isComment(r)?e.loc.start.column:r.loc.start.column:r.loc.end.column}:clonePosition(e.loc.start);let u=0;if(s)u=1;else{const e=2,t=1;c.column+=a?e:t}c.line+=u;const y={lines:c.line-t.loc.start.line,columns:c.column-t.loc.start.column},p=getSpan(t.loc);return{shift:y,offset:{lines:p.lines+(u-1),columns:p.columns+(a||l?2:0)+(i?1:0)}}}function remove(e,t,n){if(!hasItems(t))throw new Error(`Unsupported parent type "${t.type}" for remove`);let r=t.items.indexOf(n);if(r<0){if((r=t.items.findIndex(e=>hasItem(e)&&e.item===n))<0)throw new Error("Could not find node in parent for removal");n=t.items[r]}const o=t.items[r-1];let a=t.items[r+1];t.items.splice(r,1);let l=getSpan(n.loc);a&&isComment(a)&&a.loc.start.line===n.loc.end.line&&(l=getSpan({start:n.loc.start,end:a.loc.end}),a=t.items[r+1],t.items.splice(r,1));const 
i=o&&isInlineItem(o),s=o&&o.loc.end.line===n.loc.start.line,c=a&&a.loc.start.line===n.loc.end.line,u=i&&(s||c),y={lines:-(l.lines-(u?1:0)),columns:-l.columns};i&&s&&(y.columns-=2),i&&o&&!a&&(o.comma=!1);const p=o||t,d=o?getExit(e):getEnter(e),f=getExit(e),m=d.get(p);m&&(y.lines+=m.lines,y.columns+=m.columns);const T=f.get(n);T&&(y.lines+=T.lines,y.columns+=T.columns),d.set(p,y)}function applyBracketSpacing(e,t,n=!0){if(!n)return;if(!t.items.length)return;addOffset({lines:0,columns:1},getEnter(e),t);const r=last(t.items);addOffset({lines:0,columns:1},getExit(e),r)}function applyTrailingComma(e,t,n=!1){if(!n)return;if(!t.items.length)return;const r=last(t.items);r.comma=!0,addOffset({lines:0,columns:1},getExit(e),r)}function applyWrites(e){const t=getEnter(e),n=getExit(e),r={lines:0,columns:{}};function o(e){e.loc.start.line+=r.lines,e.loc.start.column+=r.columns[e.loc.start.line]||0;const n=t.get(e);n&&(r.lines+=n.lines,r.columns[e.loc.start.line]=(r.columns[e.loc.start.line]||0)+n.columns)}function a(e){e.loc.end.line+=r.lines,e.loc.end.column+=r.columns[e.loc.end.line]||0;const t=n.get(e);t&&(r.lines+=t.lines,r.columns[e.loc.end.line]=(r.columns[e.loc.end.line]||0)+t.columns)}const l={enter:o,exit:a};traverse(e,{[NodeType.Document]:l,[NodeType.Table]:l,[NodeType.TableArray]:l,[NodeType.InlineTable]:l,[NodeType.InlineArray]:l,[NodeType.InlineItem]:l,[NodeType.TableKey]:l,[NodeType.TableArrayKey]:l,[NodeType.KeyValue]:{enter(e){const t=e.loc.start.line+r.lines,a=n.get(e.key);e.equals+=(r.columns[t]||0)+(a?a.columns:0),o(e)},exit:a},[NodeType.Key]:l,[NodeType.String]:l,[NodeType.Integer]:l,[NodeType.Float]:l,[NodeType.Boolean]:l,[NodeType.DateTime]:l,[NodeType.Comment]:l}),enter_offsets.delete(e),exit_offsets.delete(e)}function shiftNode(e,t,n={}){const{first_line_only:r=!1}=n,o=e.loc.start.line,{lines:a,columns:l}=t,i=e=>{r&&e.loc.start.line!==o||(e.loc.start.column+=l,e.loc.end.column+=l),e.loc.start.line+=a,e.loc.end.line+=a};return traverse(e,{[NodeType.Table]:i,[NodeType.TableKey]:i,[NodeType.TableArray]:i,[NodeType.TableArrayKey]:i,[NodeType.KeyValue](e){i(e),e.equals+=l},[NodeType.Key]:i,[NodeType.String]:i,[NodeType.Integer]:i,[NodeType.Float]:i,[NodeType.Boolean]:i,[NodeType.DateTime]:i,[NodeType.InlineArray]:i,[NodeType.InlineItem]:i,[NodeType.InlineTable]:i,[NodeType.Comment]:i}),e}function perLine(e){if(!e.items.length)return!1;return getSpan(e.loc).lines>e.items.length}function addOffset(e,t,n,r){const o=t.get(r||n);o&&(e.lines+=o.lines,e.columns+=o.columns),t.set(n,e)}function generateDocument(){return{type:NodeType.Document,loc:{start:zero(),end:zero()},items:[]}}function generateTable(e){const t=generateTableKey(e);return{type:NodeType.Table,loc:cloneLocation(t.loc),key:t,items:[]}}function generateTableKey(e){const t=keyValueToRaw(e);return{type:NodeType.TableKey,loc:{start:zero(),end:{line:1,column:t.length+2}},item:{type:NodeType.Key,loc:{start:{line:1,column:1},end:{line:1,column:t.length+1}},value:e,raw:t}}}function generateTableArray(e){const t=generateTableArrayKey(e);return{type:NodeType.TableArray,loc:cloneLocation(t.loc),key:t,items:[]}}function generateTableArrayKey(e){const t=keyValueToRaw(e);return{type:NodeType.TableArrayKey,loc:{start:zero(),end:{line:1,column:t.length+4}},item:{type:NodeType.Key,loc:{start:{line:1,column:2},end:{line:1,column:t.length+2}},value:e,raw:t}}}function generateKeyValue(e,t){const n=generateKey(e),{column:r}=n.loc.end,o=r+1;return 
shiftNode(t,{lines:0,columns:r+3-t.loc.start.column},{first_line_only:!0}),{type:NodeType.KeyValue,loc:{start:clonePosition(n.loc.start),end:clonePosition(t.loc.end)},key:n,equals:o,value:t}}const IS_BARE_KEY=/[\w,\d,\_,\-]+/;function keyValueToRaw(e){return e.map(e=>IS_BARE_KEY.test(e)?e:JSON.stringify(e)).join(".")}function generateKey(e){const t=keyValueToRaw(e);return{type:NodeType.Key,loc:{start:zero(),end:{line:1,column:t.length}},raw:t,value:e}}function generateString(e){const t=JSON.stringify(e);return{type:NodeType.String,loc:{start:zero(),end:{line:1,column:t.length}},raw:t,value:e}}function generateInteger(e){const t=e.toString();return{type:NodeType.Integer,loc:{start:zero(),end:{line:1,column:t.length}},raw:t,value:e}}function generateFloat(e){const t=e.toString();return{type:NodeType.Float,loc:{start:zero(),end:{line:1,column:t.length}},raw:t,value:e}}function generateBoolean(e){return{type:NodeType.Boolean,loc:{start:zero(),end:{line:1,column:e?4:5}},value:e}}function generateDateTime(e){const t=e.toISOString();return{type:NodeType.DateTime,loc:{start:zero(),end:{line:1,column:t.length}},raw:t,value:e}}function generateInlineArray(){return{type:NodeType.InlineArray,loc:{start:zero(),end:{line:1,column:2}},items:[]}}function generateInlineItem(e){return{type:NodeType.InlineItem,loc:cloneLocation(e.loc),item:e,comma:!1}}function generateInlineTable(){return{type:NodeType.InlineTable,loc:{start:zero(),end:{line:1,column:2}},items:[]}}function formatTopLevel(e){return e.items.filter(e=>{if(!isKeyValue(e))return!1;const t=isInlineTable(e.value),n=isInlineArray(e.value)&&e.value.items.length&&isInlineTable(e.value.items[0].item);return t||n}).forEach(t=>{remove(e,e,t),isInlineTable(t.value)?insert(e,e,formatTable(t)):formatTableArray(t).forEach(t=>{insert(e,e,t)})}),applyWrites(e),e}function formatTable(e){const t=generateTable(e.key.value);for(const n of e.value.items)insert(t,t,n.item);return applyWrites(t),t}function formatTableArray(e){const t=generateDocument();for(const n of e.value.items){const r=generateTableArray(e.key.value);insert(t,t,r);for(const e of n.item.items)insert(t,r,e.item)}return applyWrites(t),t.items}function formatPrintWidth(e,t){return e}function formatEmptyLines(e){let t=0,n=0;for(const r of e.items)0===n&&r.loc.start.line>1?t=1-r.loc.start.line:r.loc.start.line+t>n+2&&(t+=n+2-(r.loc.start.line+t)),shiftNode(r,{lines:t,columns:0}),n=r.loc.end.line;return e}const default_format={printWidth:80,trailingComma:!1,bracketSpacing:!0};function parseJS(e,t={}){t=Object.assign({},default_format,t),e=toJSON(e);const n=generateDocument();for(const r of walkObject(e,t))insert(n,n,r);return applyWrites(n),pipe(n,formatTopLevel,e=>formatPrintWidth(e),formatEmptyLines)}function*walkObject(e,t){for(const n of Object.keys(e))yield generateKeyValue([n],walkValue$1(e[n],t))}function walkValue$1(e,t){if(null==e)throw new Error('"null" and "undefined" values are not supported');return isString(e)?generateString(e):isInteger(e)?generateInteger(e):isFloat(e)?generateFloat(e):isBoolean(e)?generateBoolean(e):isDate(e)?generateDateTime(e):Array.isArray(e)?walkInlineArray(e,t):walkInlineTable(e,t)}function walkInlineArray(e,t){const n=generateInlineArray();for(const r of e){insert(n,n,generateInlineItem(walkValue$1(r,t)))}return applyBracketSpacing(n,n,t.bracketSpacing),applyTrailingComma(n,n,t.trailingComma),applyWrites(n),n}function walkInlineTable(e,t){if(!isObject(e=toJSON(e)))return walkValue$1(e,t);const n=generateInlineTable(),r=[...walkObject(e,t)];for(const e of 
r){insert(n,n,generateInlineItem(e))}return applyBracketSpacing(n,n,t.bracketSpacing),applyTrailingComma(n,n,t.trailingComma),applyWrites(n),n}function toJSON(e){return e&&!isDate(e)&&"function"==typeof e.toJSON?e.toJSON():e}const BY_NEW_LINE=/(\r\n|\n)/g;function toTOML(e,t="\n"){const n=[];return traverse(e,{[NodeType.TableKey](e){const{start:t,end:r}=e.loc;write(n,{start:t,end:{line:t.line,column:t.column+1}},"["),write(n,{start:{line:r.line,column:r.column-1},end:r},"]")},[NodeType.TableArrayKey](e){const{start:t,end:r}=e.loc;write(n,{start:t,end:{line:t.line,column:t.column+2}},"[["),write(n,{start:{line:r.line,column:r.column-2},end:r},"]]")},[NodeType.KeyValue](e){const{start:{line:t}}=e.loc;write(n,{start:{line:t,column:e.equals},end:{line:t,column:e.equals+1}},"=")},[NodeType.Key](e){write(n,e.loc,e.raw)},[NodeType.String](e){write(n,e.loc,e.raw)},[NodeType.Integer](e){write(n,e.loc,e.raw)},[NodeType.Float](e){write(n,e.loc,e.raw)},[NodeType.Boolean](e){write(n,e.loc,e.value.toString())},[NodeType.DateTime](e){write(n,e.loc,e.raw)},[NodeType.InlineArray](e){const{start:t,end:r}=e.loc;write(n,{start:t,end:{line:t.line,column:t.column+1}},"["),write(n,{start:{line:r.line,column:r.column-1},end:r},"]")},[NodeType.InlineTable](e){const{start:t,end:r}=e.loc;write(n,{start:t,end:{line:t.line,column:t.column+1}},"{"),write(n,{start:{line:r.line,column:r.column-1},end:r},"}")},[NodeType.InlineItem](e){if(!e.comma)return;const t=e.loc.end;write(n,{start:t,end:{line:t.line,column:t.column+1}},",")},[NodeType.Comment](e){write(n,e.loc,e.raw)}}),n.join(t)+t}function write(e,t,n){const r=n.split(BY_NEW_LINE),o=t.end.line-t.start.line+1;if(r.length!==o)throw new Error(`Mismatch between location and raw string, expected ${o} lines for "${n}"`);for(let n=t.start.line;n<=t.end.line;n++){const o=getLine$1(e,n),a=n===t.start.line,l=n===t.end.line,i=a?o.substr(0,t.start.column).padEnd(t.start.column,SPACE):"",s=l?o.substr(t.end.column):"";e[n-1]=i+r[n-t.start.line]+s}}function getLine$1(e,t){if(!e[t-1])for(let n=0;n1?ensureTable(i,n.slice(0,-1)):i)[last(n)]=c,a.add(joinKey(n)),isInlineTable(e.value)&&(l=i,i=c)},exit(e){isInlineTable(e.value)&&(i=l)}},[NodeType.InlineTable]:{enter(){s=!0},exit(){s=!1}}}),n}function toValue(e){switch(e.type){case NodeType.InlineTable:const t=blank();return e.items.forEach(({item:e})=>{const n=e.key.value,r=toValue(e.value);(n.length>1?ensureTable(t,n.slice(0,-1)):t)[last(n)]=r}),t;case NodeType.InlineArray:return e.items.map(e=>toValue(e.item));case NodeType.String:case NodeType.Integer:case NodeType.Float:case NodeType.Boolean:case NodeType.DateTime:return e.value;default:throw new Error(`Unrecognized value type "${e.type}"`)}}function validateKey(e,t,n,r){let o=[],a=0;for(const n of t){if(o.push(n),!has(e,n))return;if(isPrimitive(e[n]))throw new Error(`Invalid key, a value has already been defined for ${o.join(".")}`);const l=joinKey(o);if(Array.isArray(e[n])&&!r.table_arrays.has(l))throw new Error(`Invalid key, cannot add to a static array at ${l}`);const i=a++(e[t]||(e[t]=blank()),Array.isArray(e[t])?last(e[t]):e[t]),e)}function isPrimitive(e){return"object"!=typeof e&&!isDate(e)}function joinKey(e){return e.join(".")}var ChangeType;function isAdd(e){return e.type===ChangeType.Add}function isEdit(e){return e.type===ChangeType.Edit}function isRemove(e){return e.type===ChangeType.Remove}function isMove(e){return e.type===ChangeType.Move}function isRename(e){return e.type===ChangeType.Rename}function diff(e,t,n=[]){return 
e===t||datesEqual(e,t)?[]:Array.isArray(e)&&Array.isArray(t)?compareArrays(e,t,n):isObject(e)&&isObject(t)?compareObjects(e,t,n):[{type:ChangeType.Edit,path:n}]}function compareObjects(e,t,n=[]){let r=[];const o=Object.keys(e),a=o.map(t=>stableStringify(e[t])),l=Object.keys(t),i=l.map(e=>stableStringify(t[e])),s=(e,t)=>{if(t.indexOf(e)<0)return!1;const n=o[a.indexOf(e)];return!l.includes(n)};return o.forEach((o,c)=>{const u=n.concat(o);if(l.includes(o))merge(r,diff(e[o],t[o],u));else if(s(a[c],i)){const e=l[i.indexOf(a[c])];r.push({type:ChangeType.Rename,path:n,from:o,to:e})}else r.push({type:ChangeType.Remove,path:u})}),l.forEach((e,t)=>{o.includes(e)||s(i[t],a)||r.push({type:ChangeType.Add,path:n.concat(e)})}),r}function compareArrays(e,t,n=[]){let r=[];const o=e.map(stableStringify),a=t.map(stableStringify);a.forEach((l,i)=>{const s=i>=o.length;if(!s&&o[i]===l)return;const c=o.indexOf(l,i+1);if(!s&&c>-1){r.push({type:ChangeType.Move,path:n,from:c,to:i});const e=o.splice(c,1);return void o.splice(i,0,...e)}const u=!a.includes(o[i]);if(!s&&u)return merge(r,diff(e[i],t[i],n.concat(i))),void(o[i]=l);r.push({type:ChangeType.Add,path:n.concat(i)}),o.splice(i,0,l)});for(let e=a.length;e{try{let a=[];if(isKeyValue(e))a=e.key.value;else if(isTable(e))a=e.key.item.value;else if(isTableArray(e)){const t=stableStringify(a=e.key.item.value);n[t]||(n[t]=0);const r=n[t]++;a=a.concat(r)}else isInlineItem(e)&&isKeyValue(e.item)?a=e.item.key.value:isInlineItem(e)&&(a=[o]);return!(!a.length||!arraysEqual(a,t.slice(0,a.length)))&&(r=findByPath(e,t.slice(a.length)),!0)}catch(e){return!1}}),!r)throw new Error(`Could not find node at path ${t.join(".")}`);return r}function tryFindByPath(e,t){try{return findByPath(e,t)}catch(e){}}function findParent(e,t){let n,r=t;for(;r.length&&!n;)n=tryFindByPath(e,r=r.slice(0,-1));if(!n)throw new Error(`Count not find parent node for path ${t.join(".")}`);return n}function patch(e,t,n){const r=[...parseTOML(e)],o=toJS(r);return toTOML(applyChanges({type:NodeType.Document,loc:{start:{line:1,column:0},end:{line:1,column:0}},items:r},parseJS(t,n),diff(o,t)).items)}function applyChanges(e,t,n){return n.forEach(n=>{if(isAdd(n)){const r=findByPath(t,n.path),o=n.path.slice(0,-1);let a,l=last(n.path),i=isTableArray(r);if(isInteger(l)&&!o.some(isInteger)){const t=tryFindByPath(e,o.concat(0));t&&isTableArray(t)&&(i=!0)}if(isTable(r))a=e;else if(i){a=e;const t=e,n=tryFindByPath(t,o.concat(l-1)),r=tryFindByPath(t,o.concat(l));l=r?t.items.indexOf(r):n?t.items.indexOf(n)+1:t.items.length}else isKeyValue(a=findParent(e,n.path))&&(a=a.value);isTableArray(a)||isInlineArray(a)||isDocument(a)?insert(e,a,r,l):insert(e,a,r)}else if(isEdit(n)){let r,o=findByPath(e,n.path),a=findByPath(t,n.path);isKeyValue(o)&&isKeyValue(a)?(r=o,o=o.value,a=a.value):r=findParent(e,n.path),replace(e,r,o,a)}else if(isRemove(n)){let t=findParent(e,n.path);isKeyValue(t)&&(t=t.value);const r=findByPath(e,n.path);remove(e,t,r)}else if(isMove(n)){let t=findByPath(e,n.path);hasItem(t)&&(t=t.item),isKeyValue(t)&&(t=t.value);const r=t.items[n.from];remove(e,t,r),insert(e,t,r,n.to)}else if(isRename(n)){let r=findByPath(e,n.path.concat(n.from)),o=findByPath(t,n.path.concat(n.to));hasItem(r)&&(r=r.item),hasItem(o)&&(o=o.item),replace(e,r,r.key,o.key)}}),applyWrites(e),e}function parse(e){return toJS(parseTOML(e),e)}function stringify(e,t){return 
toTOML(parseJS(e,t).items)}!function(e){e.Add="Add",e.Edit="Edit",e.Remove="Remove",e.Move="Move",e.Rename="Rename"}(ChangeType||(ChangeType={})),exports.parse=parse,exports.patch=patch,exports.stringify=stringify; +//# sourceMappingURL=toml-patch.cjs.min.js.map diff --git a/node_modules/toml-patch/dist/toml-patch.cjs.min.js.map b/node_modules/toml-patch/dist/toml-patch.cjs.min.js.map new file mode 100644 index 0000000..e321e5a --- /dev/null +++ b/node_modules/toml-patch/dist/toml-patch.cjs.min.js.map @@ -0,0 +1 @@ +{"version":3,"file":"toml-patch.cjs.min.js","sources":["../src/ast.ts","../src/tokenizer.ts","../src/cursor.ts","../src/location.ts","../src/parse-error.ts","../src/utils.ts","../src/parse-string.ts","../src/parse-toml.ts","../src/traverse.ts","../src/writer.ts","../src/generate.ts","../src/format.ts","../src/parse-js.ts","../src/to-toml.ts","../src/to-js.ts","../src/diff.ts","../src/find-by-path.ts","../src/patch.ts","../src/index.ts"],"sourcesContent":["import { Location } from './location';\n\nexport enum NodeType {\n Document = 'Document',\n Table = 'Table',\n TableKey = 'TableKey',\n TableArray = 'TableArray',\n TableArrayKey = 'TableArrayKey',\n KeyValue = 'KeyValue',\n Key = 'Key',\n String = 'String',\n Integer = 'Integer',\n Float = 'Float',\n Boolean = 'Boolean',\n DateTime = 'DateTime',\n InlineArray = 'InlineArray',\n InlineItem = 'InlineItem',\n InlineTable = 'InlineTable',\n Comment = 'Comment'\n}\n\nexport type AST = Iterable;\n\n//\n// Document\n//\n// Top-level document that stores AST nodes\n//\nexport interface Document extends Node {\n type: NodeType.Document;\n items: Array;\n}\nexport function isDocument(node: Node): node is Document {\n return node.type === NodeType.Document;\n}\n\n//\n// Table\n//\n// Top-level object\n//\n// v-------|\n// [table] |\n// b = \"c\" |\n// |\n// # note |\n// ^--|\n// [b]\n//\nexport interface Table extends Node {\n type: NodeType.Table;\n key: TableKey;\n items: Array;\n}\nexport function isTable(node: Node): node is Table {\n return node.type === NodeType.Table;\n}\n\n//\n// TableKey\n//\n// Used to store bracket information for Table keys\n//\n// loc includes brackets\n//\n// [ key ]\n// ^-------^\n//\nexport interface TableKey extends Node {\n type: NodeType.TableKey;\n item: Key;\n}\nexport function isTableKey(node: Node): node is TableKey {\n return node.type === NodeType.TableKey;\n}\n\n//\n// TableArray\n//\n// Top-level array item\n//\n// v---------|\n// [[array]] |\n// a=\"b\" |\n// |\n// # details |\n// ^-|\n// [[array]]\n//\nexport interface TableArray extends Node {\n type: NodeType.TableArray;\n key: TableArrayKey;\n items: Array;\n}\nexport function isTableArray(node: Node): node is TableArray {\n return node.type === NodeType.TableArray;\n}\n\n//\n// TableArrayKey\n//\n// Used to store bracket information for TableArray keys\n// loc includes brackets\n//\n// [[ key ]]\n// ^---------^\n//\nexport interface TableArrayKey extends Node {\n type: NodeType.TableArrayKey;\n item: Key;\n}\nexport function isTableArrayKey(node: Node): node is TableArrayKey {\n return node.type === NodeType.TableArrayKey;\n}\n\n//\n// KeyValue\n//\n// Key and Value nodes, with position information on equals sign\n//\n// key=\"value\" # note\n// ^---------^\n//\nexport interface KeyValue extends Node {\n type: NodeType.KeyValue;\n key: Key;\n value: Value;\n\n // Column index (0-based) of equals sign\n equals: number;\n}\nexport function isKeyValue(node: Node): node is KeyValue {\n return node.type === NodeType.KeyValue;\n}\n\n//\n// 
Key\n//\n// Store raw key and parts (from dots)\n//\nexport interface Key extends Node {\n type: NodeType.Key;\n raw: string;\n\n // Note: Array for keys with dots\n // e.g. a.b -> raw = 'a.b', value = ['a', 'b']\n value: string[];\n}\nexport function isKey(node: Node): node is Key {\n return node.type === NodeType.Key;\n}\n\n//\n// String\n//\n// loc includes quotes\n//\n// a = \"string\"\n// ^------^\n//\nexport interface String extends Node {\n type: NodeType.String;\n raw: string;\n value: string;\n}\nexport function isString(node: Node): node is String {\n return node.type === NodeType.String;\n}\n\n//\n// Integer\n//\nexport interface Integer extends Node {\n type: NodeType.Integer;\n raw: string;\n value: number;\n}\nexport function isInteger(node: Node): node is Integer {\n return node.type === NodeType.Integer;\n}\n\n//\n// Float\n//\nexport interface Float extends Node {\n type: NodeType.Float;\n raw: string;\n value: number;\n}\nexport function isFloat(node: Node): node is Float {\n return node.type === NodeType.Float;\n}\n\n//\n// Boolean\n//\nexport interface Boolean extends Node {\n type: NodeType.Boolean;\n\n // Only `true` and `false` are permitted\n // -> don't need separate raw and value\n value: boolean;\n}\nexport function isBoolean(node: Node): node is Boolean {\n return node.type === NodeType.Boolean;\n}\n\n//\n// DateTime\n//\n// Note: Currently, Offset Date-Time, Local Date-Time, Local Date, and Local Time\n// are handled via raw\n//\nexport interface DateTime extends Node {\n type: NodeType.DateTime;\n raw: string;\n value: Date;\n}\nexport function isDateTime(node: Node): node is DateTime {\n return node.type === NodeType.DateTime;\n}\n\n//\n// InlineArray\n//\nexport interface InlineArray extends Node {\n type: NodeType.InlineArray;\n items: InlineArrayItem[];\n}\nexport function isInlineArray(node: Node): node is InlineArray {\n return node.type === NodeType.InlineArray;\n}\n\n//\n// InlineArrayItem\n//\n// loc for InlineArrayItem is from start of value to before comma\n// or end-of-value if no comma\n//\n// [ \"a\" ,\"b\", \"c\" ]\n// ^---^ ^-^ ^-^\n//\nexport interface InlineItem extends Node {\n type: NodeType.InlineItem;\n item: TItem;\n comma: boolean;\n}\nexport function isInlineItem(node: Node): node is InlineItem {\n return node.type === NodeType.InlineItem;\n}\n\nexport interface InlineArrayItem extends InlineItem {}\n\n//\n// InlineTable\n//\nexport interface InlineTable extends Node {\n type: NodeType.InlineTable;\n items: InlineTableItem[];\n}\nexport function isInlineTable(node: Node): node is InlineTable {\n return node.type === NodeType.InlineTable;\n}\n\n//\n// InlineTableItem\n//\n// loc for InlineTableItem follows InlineArrayItem\n//\n// { a=\"b\" , c = \"d\" }\n// ^------^ ^--------^\n//\nexport interface InlineTableItem extends InlineItem {}\n\n//\n// Comment\n//\n// loc starts at \"#\" and goes to end of comment (trailing whitespace ignored)\n//\n// # comment here\n// ^------------^\n//\nexport interface Comment extends Node {\n type: NodeType.Comment;\n raw: string;\n}\nexport function isComment(node: Node): node is Comment {\n return node.type === NodeType.Comment;\n}\n\n//\n// Combinations\n//\n\nexport interface WithItems extends Node {\n items: Node[];\n}\nexport function hasItems(node: Node): node is WithItems {\n return (\n isDocument(node) ||\n isTable(node) ||\n isTableArray(node) ||\n isInlineTable(node) ||\n isInlineArray(node)\n );\n}\n\nexport interface WithItem extends Node {\n item: Node;\n}\nexport function hasItem(node: 
Node): node is WithItem {\n return isTableKey(node) || isTableArrayKey(node) || isInlineItem(node);\n}\n\nexport type Block = KeyValue | Table | TableArray | Comment;\nexport function isBlock(node: Node): node is Block {\n return isKeyValue(node) || isTable(node) || isTableArray(node) || isComment(node);\n}\n\nexport type Value =\n | String\n | Integer\n | Float\n | Boolean\n | DateTime\n | InlineArray\n | InlineTable;\nexport function isValue(node: Node): node is Value {\n return (\n isString(node) ||\n isInteger(node) ||\n isFloat(node) ||\n isBoolean(node) ||\n isDateTime(node) ||\n isInlineArray(node) ||\n isInlineTable(node)\n );\n}\n\nexport interface Node {\n type: NodeType;\n loc: Location;\n}\n","import Cursor, { iterator } from './cursor';\nimport { Location, Locator, createLocate, findPosition } from './location';\nimport ParseError from './parse-error';\n\nexport enum TokenType {\n Bracket = 'Bracket',\n Curly = 'Curly',\n Equal = 'Equal',\n Comma = 'Comma',\n Dot = 'Dot',\n Comment = 'Comment',\n Literal = 'Literal'\n}\n\nexport interface Token {\n type: TokenType;\n raw: string;\n loc: Location;\n}\n\nexport const IS_WHITESPACE = /\\s/;\nexport const IS_NEW_LINE = /(\\r\\n|\\n)/;\nexport const DOUBLE_QUOTE = `\"`;\nexport const SINGLE_QUOTE = `'`;\nexport const SPACE = ' ';\nexport const ESCAPE = '\\\\';\n\nconst IS_VALID_LEADING_CHARACTER = /[\\w,\\d,\\\",\\',\\+,\\-,\\_]/;\n\nexport function* tokenize(input: string): IterableIterator {\n const cursor = new Cursor(iterator(input));\n cursor.next();\n\n const locate = createLocate(input);\n\n while (!cursor.done) {\n if (IS_WHITESPACE.test(cursor.value!)) {\n // (skip whitespace)\n } else if (cursor.value === '[' || cursor.value === ']') {\n // Handle special characters: [, ], {, }, =, comma\n yield specialCharacter(cursor, locate, TokenType.Bracket);\n } else if (cursor.value === '{' || cursor.value === '}') {\n yield specialCharacter(cursor, locate, TokenType.Curly);\n } else if (cursor.value === '=') {\n yield specialCharacter(cursor, locate, TokenType.Equal);\n } else if (cursor.value === ',') {\n yield specialCharacter(cursor, locate, TokenType.Comma);\n } else if (cursor.value === '.') {\n yield specialCharacter(cursor, locate, TokenType.Dot);\n } else if (cursor.value === '#') {\n // Handle comments = # -> EOL\n yield comment(cursor, locate);\n } else {\n const multiline_char =\n checkThree(input, cursor.index, SINGLE_QUOTE) ||\n checkThree(input, cursor.index, DOUBLE_QUOTE);\n\n if (multiline_char) {\n // Multi-line literals or strings = no escaping\n yield multiline(cursor, locate, multiline_char, input);\n } else {\n yield string(cursor, locate, input);\n }\n }\n\n cursor.next();\n }\n}\n\nfunction specialCharacter(cursor: Cursor, locate: Locator, type: TokenType): Token {\n return { type, raw: cursor.value!, loc: locate(cursor.index, cursor.index + 1) };\n}\n\nfunction comment(cursor: Cursor, locate: Locator): Token {\n const start = cursor.index;\n let raw = cursor.value!;\n while (!cursor.peek().done && !IS_NEW_LINE.test(cursor.peek().value!)) {\n cursor.next();\n raw += cursor.value!;\n }\n\n // Early exit is ok for comment, no closing conditions\n\n return {\n type: TokenType.Comment,\n raw,\n loc: locate(start, cursor.index + 1)\n };\n}\n\nfunction multiline(\n cursor: Cursor,\n locate: Locator,\n multiline_char: string,\n input: string\n): Token {\n const start = cursor.index;\n let quotes = multiline_char + multiline_char + multiline_char;\n let raw = quotes;\n\n // Skip over quotes\n cursor.next();\n 
cursor.next();\n cursor.next();\n\n while (!cursor.done && !checkThree(input, cursor.index, multiline_char)) {\n raw += cursor.value;\n cursor.next();\n }\n\n if (cursor.done) {\n throw new ParseError(\n input,\n findPosition(input, cursor.index),\n `Expected close of multiline string with ${quotes}, reached end of file`\n );\n }\n\n raw += quotes;\n\n cursor.next();\n cursor.next();\n\n return {\n type: TokenType.Literal,\n raw,\n loc: locate(start, cursor.index + 1)\n };\n}\n\nfunction string(cursor: Cursor, locate: Locator, input: string): Token {\n // Remaining possibilities: keys, strings, literals, integer, float, boolean\n //\n // Special cases:\n // \"...\" -> quoted\n // '...' -> quoted\n // \"...\".'...' -> bare\n // 0000-00-00 00:00:00 -> bare\n //\n // See https://github.com/toml-lang/toml#offset-date-time\n //\n // | For the sake of readability, you may replace the T delimiter between date and time with a space (as permitted by RFC 3339 section 5.6).\n // | `odt4 = 1979-05-27 07:32:00Z`\n //\n // From RFC 3339:\n //\n // | NOTE: ISO 8601 defines date and time separated by \"T\".\n // | Applications using this syntax may choose, for the sake of\n // | readability, to specify a full-date and full-time separated by\n // | (say) a space character.\n\n // First, check for invalid characters\n if (!IS_VALID_LEADING_CHARACTER.test(cursor.value!)) {\n throw new ParseError(\n input,\n findPosition(input, cursor.index),\n `Unsupported character \"${cursor.value}\". Expected ALPHANUMERIC, \", ', +, -, or _`\n );\n }\n\n const start = cursor.index;\n let raw = cursor.value!;\n let double_quoted = cursor.value === DOUBLE_QUOTE;\n let single_quoted = cursor.value === SINGLE_QUOTE;\n\n const isFinished = (cursor: Cursor) => {\n if (cursor.peek().done) return true;\n const next_item = cursor.peek().value!;\n\n return (\n !(double_quoted || single_quoted) &&\n (IS_WHITESPACE.test(next_item) ||\n next_item === ',' ||\n next_item === '.' ||\n next_item === ']' ||\n next_item === '}' ||\n next_item === '=')\n );\n };\n\n while (!cursor.done && !isFinished(cursor)) {\n cursor.next();\n\n if (cursor.value === DOUBLE_QUOTE) double_quoted = !double_quoted;\n if (cursor.value === SINGLE_QUOTE && !double_quoted) single_quoted = !single_quoted;\n\n raw += cursor.value!;\n\n if (cursor.peek().done) break;\n let next_item = cursor.peek().value!;\n\n // If next character is escape and currently double-quoted,\n // check for escaped quote\n if (double_quoted && cursor.value === ESCAPE) {\n if (next_item === DOUBLE_QUOTE) {\n raw += DOUBLE_QUOTE;\n cursor.next();\n } else if (next_item === ESCAPE) {\n raw += ESCAPE;\n cursor.next();\n }\n }\n }\n\n if (double_quoted || single_quoted) {\n throw new ParseError(\n input,\n findPosition(input, start),\n `Expected close of string with ${double_quoted ? 
DOUBLE_QUOTE : SINGLE_QUOTE}`\n );\n }\n\n return {\n type: TokenType.Literal,\n raw,\n loc: locate(start, cursor.index + 1)\n };\n}\n\nfunction checkThree(input: string, current: number, check: string): false | string {\n return (\n input[current] === check &&\n input[current + 1] === check &&\n input[current + 2] === check &&\n check\n );\n}\n","export function iterator(value: Iterable): Iterator {\n return value[Symbol.iterator]();\n}\n\nexport default class Cursor implements Iterator {\n iterator: Iterator;\n index: number;\n value?: T;\n done: boolean;\n peeked: IteratorResult | null;\n\n constructor(iterator: Iterator) {\n this.iterator = iterator;\n this.index = -1;\n this.value = undefined;\n this.done = false;\n this.peeked = null;\n }\n\n next(): IteratorResult {\n if (this.done) return done();\n\n const result = this.peeked || this.iterator.next();\n\n this.index += 1;\n this.value = result.value;\n this.done = result.done;\n this.peeked = null;\n\n return result;\n }\n\n peek(): IteratorResult {\n if (this.done) return done();\n if (this.peeked) return this.peeked;\n\n this.peeked = this.iterator.next();\n return this.peeked;\n }\n\n [Symbol.iterator]() {\n return this;\n }\n}\n\nfunction done(): IteratorResult {\n return { value: undefined, done: true };\n}\n","export interface Location {\n start: Position;\n end: Position;\n}\n\nexport interface Position {\n // Note: line is 1-indexed while column is 0-indexed\n line: number;\n column: number;\n}\n\nexport interface Span {\n lines: number;\n columns: number;\n}\n\nexport function getSpan(location: Location): Span {\n return {\n lines: location.end.line - location.start.line + 1,\n columns: location.end.column - location.start.column\n };\n}\n\nexport type Locator = (start: number, end: number) => Location;\nexport function createLocate(input: string): Locator {\n const lines = findLines(input);\n\n return (start: number, end: number) => {\n return {\n start: findPosition(lines, start),\n end: findPosition(lines, end)\n };\n };\n}\n\nexport function findPosition(input: string | number[], index: number): Position {\n // abc\\ndef\\ng\n // 0123 4567 8\n // 012\n // 0\n //\n // lines = [3, 7, 9]\n //\n // c = 2: 0 -> 1, 2 - (undefined + 1 || 0) = 2\n // 3: 0 -> 1, 3 - (undefined + 1 || 0) = 3\n // e = 5: 1 -> 2, 5 - (3 + 1 || 0) = 1\n // g = 8: 2 -> 3, 8 - (7 + 1 || 0) = 0\n\n const lines = Array.isArray(input) ? 
input : findLines(input);\n const line = lines.findIndex(line_index => line_index >= index) + 1;\n const column = index - (lines[line - 2] + 1 || 0);\n\n return { line, column };\n}\n\nexport function getLine(input: string, position: Position): string {\n const lines = findLines(input);\n const start = lines[position.line - 2] || 0;\n const end = lines[position.line - 1] || input.length;\n\n return input.substr(start, end - start);\n}\n\nexport function findLines(input: string): number[] {\n // exec is stateful, so create new regexp each time\n const BY_NEW_LINE = /[\\r\\n|\\n]/g;\n const indexes: number[] = [];\n\n let match;\n while ((match = BY_NEW_LINE.exec(input)) != null) {\n indexes.push(match.index);\n }\n indexes.push(input.length + 1);\n\n return indexes;\n}\n\nexport function clonePosition(position: Position): Position {\n return { line: position.line, column: position.column };\n}\n\nexport function cloneLocation(location: Location): Location {\n return { start: clonePosition(location.start), end: clonePosition(location.end) };\n}\n\nexport function zero(): Position {\n return { line: 1, column: 0 };\n}\n","import { Position, getLine } from './location';\n\nexport default class ParseError extends Error {\n line: number;\n column: number;\n\n constructor(input: string, position: Position, message: string) {\n let error_message = `Error parsing TOML (${position.line}, ${position.column + 1}):\\n`;\n\n if (input) {\n const line = getLine(input, position);\n const pointer = `${whitespace(position.column)}^`;\n\n if (line) error_message += `${line}\\n${pointer}\\n`;\n }\n error_message += message;\n\n super(error_message);\n\n this.line = position.line;\n this.column = position.column;\n }\n}\n\nexport function isParseError(error: Error): error is ParseError {\n return error && Object.prototype.hasOwnProperty.call(error, 'line');\n}\n\nfunction whitespace(count: number, character: string = ' '): string {\n return character.repeat(count);\n}\n","export function last(values: TValue[]): TValue | undefined {\n return values[values.length - 1];\n}\n\nexport type BlankObject = { [key: string]: any };\n\nexport function blank(): BlankObject {\n return Object.create(null);\n}\n\nexport function isString(value: any): value is string {\n return typeof value === 'string';\n}\n\nexport function isInteger(value: any): value is number {\n return typeof value === 'number' && value % 1 === 0;\n}\n\nexport function isFloat(value: any): value is number {\n return typeof value === 'number' && !isInteger(value);\n}\n\nexport function isBoolean(value: any): value is boolean {\n return typeof value === 'boolean';\n}\n\nexport function isDate(value: any): value is Date {\n return Object.prototype.toString.call(value) === '[object Date]';\n}\n\nexport function isObject(value: any): boolean {\n return value && typeof value === 'object' && !isDate(value) && !Array.isArray(value);\n}\n\nexport function isIterable(value: any): value is Iterable {\n return value != null && typeof value[Symbol.iterator] === 'function';\n}\n\nexport function has(object: any, key: string): boolean {\n return Object.prototype.hasOwnProperty.call(object, key);\n}\n\nexport function arraysEqual(a: TItem[], b: TItem[]): boolean {\n if (a.length !== b.length) return false;\n\n for (let i = 0; i < a.length; i++) {\n if (a[i] !== b[i]) return false;\n }\n\n return true;\n}\n\nexport function datesEqual(a: any, b: any): boolean {\n return isDate(a) && isDate(b) && a.toISOString() === b.toISOString();\n}\n\nexport function pipe(value: 
TValue, ...fns: Array<(value: TValue) => TValue>): TValue {\n return fns.reduce((value, fn) => fn(value), value);\n}\n\nexport function stableStringify(object: any): string {\n if (isObject(object)) {\n const key_values = Object.keys(object)\n .sort()\n .map(key => `${JSON.stringify(key)}:${stableStringify(object[key])}`);\n\n return `{${key_values.join(',')}}`;\n } else if (Array.isArray(object)) {\n return `[${object.map(stableStringify).join(',')}]`;\n } else {\n return JSON.stringify(object);\n }\n}\n\nexport function merge(target: TValue[], values: TValue[]) {\n // __mutating__: merge values into target\n // Reference: https://dev.to/uilicious/javascript-array-push-is-945x-faster-than-array-concat-1oki\n const original_length = target.length;\n const added_length = values.length;\n target.length = original_length + added_length;\n\n for (let i = 0; i < added_length; i++) {\n target[original_length + i] = values[i];\n }\n}\n","import { SINGLE_QUOTE, DOUBLE_QUOTE } from './tokenizer';\nimport { pipe } from './utils';\n\nconst TRIPLE_DOUBLE_QUOTE = `\"\"\"`;\nconst TRIPLE_SINGLE_QUOTE = `'''`;\nconst LF = '\\\\n';\nconst CRLF = '\\\\r\\\\n';\nconst IS_CRLF = /\\r\\n/g;\nconst IS_LF = /\\n/g;\nconst IS_LEADING_NEW_LINE = /^(\\r\\n|\\n)/;\nconst IS_LINE_ENDING_BACKSLASH = /\\\\\\s*[\\n\\r\\n]\\s*/g;\n\nexport function parseString(raw: string): string {\n if (raw.startsWith(TRIPLE_SINGLE_QUOTE)) {\n return pipe(\n trim(raw, 3),\n trimLeadingWhitespace\n );\n } else if (raw.startsWith(SINGLE_QUOTE)) {\n return trim(raw, 1);\n } else if (raw.startsWith(TRIPLE_DOUBLE_QUOTE)) {\n return pipe(\n trim(raw, 3),\n trimLeadingWhitespace,\n lineEndingBackslash,\n escapeNewLines,\n unescape\n );\n } else if (raw.startsWith(DOUBLE_QUOTE)) {\n return pipe(\n trim(raw, 1),\n unescape\n );\n } else {\n return raw;\n }\n}\n\nexport function unescape(escaped: string): string {\n // JSON.parse handles everything except \\UXXXXXXXX\n // replace those instances with code point, escape that, and then parse\n const LARGE_UNICODE = /\\\\U[a-fA-F0-9]{8}/g;\n const json_escaped = escaped.replace(LARGE_UNICODE, value => {\n const code_point = parseInt(value.replace('\\\\U', ''), 16);\n const as_string = String.fromCodePoint(code_point);\n\n return trim(JSON.stringify(as_string), 1);\n });\n\n return JSON.parse(`\"${json_escaped}\"`);\n}\n\nexport function escape(value: string): string {\n return trim(JSON.stringify(value), 1);\n}\n\nfunction trim(value: string, count: number): string {\n return value.substr(count, value.length - count * 2);\n}\n\nfunction trimLeadingWhitespace(value: string): string {\n return IS_LEADING_NEW_LINE.test(value) ? 
value.substr(1) : value;\n}\n\nfunction escapeNewLines(value: string): string {\n return value.replace(IS_CRLF, CRLF).replace(IS_LF, LF);\n}\n\nfunction lineEndingBackslash(value: string): string {\n return value.replace(IS_LINE_ENDING_BACKSLASH, '');\n}\n","import {\n NodeType,\n KeyValue,\n Table,\n TableKey,\n TableArray,\n TableArrayKey,\n Key,\n Value,\n String,\n Integer,\n Float,\n Boolean,\n DateTime,\n InlineTable,\n InlineArray,\n InlineItem,\n Comment,\n AST,\n Block\n} from './ast';\nimport { Token, TokenType, tokenize, DOUBLE_QUOTE, SINGLE_QUOTE } from './tokenizer';\nimport { parseString } from './parse-string';\nimport Cursor from './cursor';\nimport { clonePosition, cloneLocation } from './location';\nimport ParseError from './parse-error';\nimport { merge } from './utils';\n\nconst TRUE = 'true';\nconst FALSE = 'false';\nconst HAS_E = /e/i;\nconst IS_DIVIDER = /\\_/g;\nconst IS_INF = /inf/;\nconst IS_NAN = /nan/;\nconst IS_HEX = /^0x/;\nconst IS_OCTAL = /^0o/;\nconst IS_BINARY = /^0b/;\nexport const IS_FULL_DATE = /(\\d{4})-(\\d{2})-(\\d{2})/;\nexport const IS_FULL_TIME = /(\\d{2}):(\\d{2}):(\\d{2})/;\n\nexport default function* parseTOML(input: string): AST {\n const tokens = tokenize(input);\n const cursor = new Cursor(tokens);\n\n while (!cursor.next().done) {\n yield* walkBlock(cursor, input);\n }\n}\n\nfunction* walkBlock(cursor: Cursor, input: string): IterableIterator {\n if (cursor.value!.type === TokenType.Comment) {\n yield comment(cursor);\n } else if (cursor.value!.type === TokenType.Bracket) {\n yield table(cursor, input);\n } else if (cursor.value!.type === TokenType.Literal) {\n yield* keyValue(cursor, input);\n } else {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Unexpected token \"${cursor.value!.type}\". Expected Comment, Bracket, or String`\n );\n }\n}\n\nfunction* walkValue(cursor: Cursor, input: string): IterableIterator {\n if (cursor.value!.type === TokenType.Literal) {\n if (cursor.value!.raw[0] === DOUBLE_QUOTE || cursor.value!.raw[0] === SINGLE_QUOTE) {\n yield string(cursor);\n } else if (cursor.value!.raw === TRUE || cursor.value!.raw === FALSE) {\n yield boolean(cursor);\n } else if (IS_FULL_DATE.test(cursor.value!.raw) || IS_FULL_TIME.test(cursor.value!.raw)) {\n yield datetime(cursor, input);\n } else if (\n (!cursor.peek().done && cursor.peek().value!.type === TokenType.Dot) ||\n IS_INF.test(cursor.value!.raw) ||\n IS_NAN.test(cursor.value!.raw) ||\n (HAS_E.test(cursor.value!.raw) && !IS_HEX.test(cursor.value!.raw))\n ) {\n yield float(cursor, input);\n } else {\n yield integer(cursor);\n }\n } else if (cursor.value!.type === TokenType.Curly) {\n yield inlineTable(cursor, input);\n } else if (cursor.value!.type === TokenType.Bracket) {\n const [inline_array, comments] = inlineArray(cursor, input);\n\n yield inline_array;\n yield* comments;\n } else {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Unrecognized token type \"${cursor.value!.type}\". 
Expected String, Curly, or Bracket`\n );\n }\n}\n\nfunction comment(cursor: Cursor): Comment {\n // # line comment\n // ^------------^ Comment\n return {\n type: NodeType.Comment,\n loc: cursor.value!.loc,\n raw: cursor.value!.raw\n };\n}\n\nfunction table(cursor: Cursor, input: string): Table | TableArray {\n // Table or TableArray\n //\n // [ key ]\n // ^-----^ TableKey\n // ^-^ Key\n //\n // [[ key ]]\n // ^ ------^ TableArrayKey\n // ^-^ Key\n //\n // a = \"b\" < Items\n // # c |\n // d = \"f\" <\n //\n // ...\n const type =\n !cursor.peek().done && cursor.peek().value!.type === TokenType.Bracket\n ? NodeType.TableArray\n : NodeType.Table;\n const is_table = type === NodeType.Table;\n\n if (is_table && cursor.value!.raw !== '[') {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Expected table opening \"[\", found ${cursor.value!.raw}`\n );\n }\n if (!is_table && (cursor.value!.raw !== '[' || cursor.peek().value!.raw !== '[')) {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Expected array of tables opening \"[[\", found ${cursor.value!.raw + cursor.peek().value!.raw}`\n );\n }\n\n // Set start location from opening tag\n const key = is_table\n ? ({\n type: NodeType.TableKey,\n loc: cursor.value!.loc\n } as Partial)\n : ({\n type: NodeType.TableArrayKey,\n loc: cursor.value!.loc\n } as Partial);\n\n // Skip to cursor.value for key value\n cursor.next();\n if (type === NodeType.TableArray) cursor.next();\n\n if (cursor.done) {\n throw new ParseError(input, key.loc!.start, `Expected table key, reached end of file`);\n }\n\n key.item = {\n type: NodeType.Key,\n loc: cloneLocation(cursor.value!.loc),\n raw: cursor.value!.raw,\n value: [parseString(cursor.value!.raw)]\n };\n\n while (!cursor.peek().done && cursor.peek().value!.type === TokenType.Dot) {\n cursor.next();\n const dot = cursor.value!;\n\n cursor.next();\n const before = ' '.repeat(dot.loc.start.column - key.item.loc.end.column);\n const after = ' '.repeat(cursor.value!.loc.start.column - dot.loc.end.column);\n\n key.item.loc.end = cursor.value!.loc.end;\n key.item.raw += `${before}.${after}${cursor.value!.raw}`;\n key.item.value.push(parseString(cursor.value!.raw));\n }\n\n cursor.next();\n\n if (is_table && (cursor.done || cursor.value!.raw !== ']')) {\n throw new ParseError(\n input,\n cursor.done ? key.item.loc.end : cursor.value!.loc.start,\n `Expected table closing \"]\", found ${cursor.done ? 'end of file' : cursor.value!.raw}`\n );\n }\n if (\n !is_table &&\n (cursor.done ||\n cursor.peek().done ||\n cursor.value!.raw !== ']' ||\n cursor.peek().value!.raw !== ']')\n ) {\n throw new ParseError(\n input,\n cursor.done || cursor.peek().done ? key.item.loc.end : cursor.value!.loc.start,\n `Expected array of tables closing \"]]\", found ${\n cursor.done || cursor.peek().done\n ? 'end of file'\n : cursor.value!.raw + cursor.peek().value!.raw\n }`\n );\n }\n\n // Set end location from closing tag\n if (!is_table) cursor.next();\n key.loc!.end = cursor.value!.loc.end;\n\n // Add child items\n let items: Array = [];\n while (!cursor.peek().done && cursor.peek().value!.type !== TokenType.Bracket) {\n cursor.next();\n merge(items, [...walkBlock(cursor, input)] as Array);\n }\n\n return {\n type: is_table ? NodeType.Table : NodeType.TableArray,\n loc: {\n start: clonePosition(key.loc!.start),\n end: items.length\n ? 
clonePosition(items[items.length - 1].loc.end)\n : clonePosition(key.loc!.end)\n },\n key: key as TableKey | TableArrayKey,\n items\n } as Table | TableArray;\n}\n\nfunction keyValue(cursor: Cursor, input: string): Array {\n // 3. KeyValue\n //\n // key = value\n // ^-^ key\n // ^ equals\n // ^---^ value\n const key: Key = {\n type: NodeType.Key,\n loc: cloneLocation(cursor.value!.loc),\n raw: cursor.value!.raw,\n value: [parseString(cursor.value!.raw)]\n };\n\n while (!cursor.peek().done && cursor.peek().value!.type === TokenType.Dot) {\n cursor.next();\n cursor.next();\n\n key.loc.end = cursor.value!.loc.end;\n key.raw += `.${cursor.value!.raw}`;\n key.value.push(parseString(cursor.value!.raw));\n }\n\n cursor.next();\n\n if (cursor.done || cursor.value!.type !== TokenType.Equal) {\n throw new ParseError(\n input,\n cursor.done ? key.loc.end : cursor.value!.loc.start,\n `Expected \"=\" for key-value, found ${cursor.done ? 'end of file' : cursor.value!.raw}`\n );\n }\n\n const equals = cursor.value!.loc.start.column;\n\n cursor.next();\n\n if (cursor.done) {\n throw new ParseError(input, key.loc.start, `Expected value for key-value, reached end of file`);\n }\n\n const [value, ...comments] = walkValue(cursor, input) as Iterable;\n\n return [\n {\n type: NodeType.KeyValue,\n key,\n value: value as Value,\n loc: {\n start: clonePosition(key.loc.start),\n end: clonePosition(value.loc.end)\n },\n equals\n },\n ...(comments as Comment[])\n ];\n}\n\nfunction string(cursor: Cursor): String {\n return {\n type: NodeType.String,\n loc: cursor.value!.loc,\n raw: cursor.value!.raw,\n value: parseString(cursor.value!.raw)\n };\n}\n\nfunction boolean(cursor: Cursor): Boolean {\n return {\n type: NodeType.Boolean,\n loc: cursor.value!.loc,\n value: cursor.value!.raw === TRUE\n };\n}\n\nfunction datetime(cursor: Cursor, input: string): DateTime {\n // Possible values:\n //\n // Offset Date-Time\n // | odt1 = 1979-05-27T07:32:00Z\n // | odt2 = 1979-05-27T00:32:00-07:00\n // | odt3 = 1979-05-27T00:32:00.999999-07:00\n // | odt4 = 1979-05-27 07:32:00Z\n //\n // Local Date-Time\n // | ldt1 = 1979-05-27T07:32:00\n // | ldt2 = 1979-05-27T00:32:00.999999\n //\n // Local Date\n // | ld1 = 1979-05-27\n //\n // Local Time\n // | lt1 = 07:32:00\n // | lt2 = 00:32:00.999999\n let loc = cursor.value!.loc;\n let raw = cursor.value!.raw;\n let value: Date;\n\n // If next token is string,\n // check if raw is full date and following is full time\n if (\n !cursor.peek().done &&\n cursor.peek().value!.type === TokenType.Literal &&\n IS_FULL_DATE.test(raw) &&\n IS_FULL_TIME.test(cursor.peek().value!.raw)\n ) {\n const start = loc.start;\n\n cursor.next();\n loc = { start, end: cursor.value!.loc.end };\n raw += ` ${cursor.value!.raw}`;\n }\n\n if (!cursor.peek().done && cursor.peek().value!.type === TokenType.Dot) {\n const start = loc.start;\n\n cursor.next();\n\n if (cursor.peek().done || cursor.peek().value!.type !== TokenType.Literal) {\n throw new ParseError(input, cursor.value!.loc.end, `Expected fractional value for DateTime`);\n }\n cursor.next();\n\n loc = { start, end: cursor.value!.loc.end };\n raw += `.${cursor.value!.raw}`;\n }\n\n if (!IS_FULL_DATE.test(raw)) {\n // For local time, use local ISO date\n const [local_date] = new Date().toISOString().split('T');\n value = new Date(`${local_date}T${raw}`);\n } else {\n value = new Date(raw.replace(' ', 'T'));\n }\n\n return {\n type: NodeType.DateTime,\n loc,\n raw,\n value\n };\n}\n\nfunction float(cursor: Cursor, input: string): Float {\n let loc = 
cursor.value!.loc;\n let raw = cursor.value!.raw;\n let value;\n\n if (IS_INF.test(raw)) {\n value = raw === '-inf' ? -Infinity : Infinity;\n } else if (IS_NAN.test(raw)) {\n value = raw === '-nan' ? -NaN : NaN;\n } else if (!cursor.peek().done && cursor.peek().value!.type === TokenType.Dot) {\n const start = loc.start;\n\n // From spec:\n // | A fractional part is a decimal point followed by one or more digits.\n //\n // -> Don't have to handle \"4.\" (i.e. nothing behind decimal place)\n\n cursor.next();\n\n if (cursor.peek().done || cursor.peek().value!.type !== TokenType.Literal) {\n throw new ParseError(input, cursor.value!.loc.end, `Expected fraction value for Float`);\n }\n cursor.next();\n\n raw += `.${cursor.value!.raw}`;\n loc = { start, end: cursor.value!.loc.end };\n value = Number(raw.replace(IS_DIVIDER, ''));\n } else {\n value = Number(raw.replace(IS_DIVIDER, ''));\n }\n\n return { type: NodeType.Float, loc, raw, value };\n}\n\nfunction integer(cursor: Cursor): Integer {\n // > Integer values -0 and +0 are valid and identical to an unprefixed zero\n if (cursor.value!.raw === '-0' || cursor.value!.raw === '+0') {\n return {\n type: NodeType.Integer,\n loc: cursor.value!.loc,\n raw: cursor.value!.raw,\n value: 0\n };\n }\n\n let radix = 10;\n if (IS_HEX.test(cursor.value!.raw)) {\n radix = 16;\n } else if (IS_OCTAL.test(cursor.value!.raw)) {\n radix = 8;\n } else if (IS_BINARY.test(cursor.value!.raw)) {\n radix = 2;\n }\n\n const value = parseInt(\n cursor\n .value!.raw.replace(IS_DIVIDER, '')\n .replace(IS_OCTAL, '')\n .replace(IS_BINARY, ''),\n radix\n );\n\n return {\n type: NodeType.Integer,\n loc: cursor.value!.loc,\n raw: cursor.value!.raw,\n value\n };\n}\n\nfunction inlineTable(cursor: Cursor, input: string): InlineTable {\n if (cursor.value!.raw !== '{') {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Expected \"{\" for inline table, found ${cursor.value!.raw}`\n );\n }\n\n // 6. InlineTable\n const value: InlineTable = {\n type: NodeType.InlineTable,\n loc: cloneLocation(cursor.value!.loc),\n items: []\n };\n\n cursor.next();\n\n while (\n !cursor.done &&\n !(cursor.value!.type === TokenType.Curly && (cursor.value as Token).raw === '}')\n ) {\n if ((cursor.value as Token).type === TokenType.Comma) {\n const previous = value.items[value.items.length - 1];\n if (!previous) {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n 'Found \",\" without previous value in inline table'\n );\n }\n\n previous.comma = true;\n previous.loc.end = cursor.value!.loc.start;\n\n cursor.next();\n continue;\n }\n\n const [item] = walkBlock(cursor, input);\n if (item.type !== NodeType.KeyValue) {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Only key-values are supported in inline tables, found ${item.type}`\n );\n }\n\n const inline_item: InlineItem = {\n type: NodeType.InlineItem,\n loc: cloneLocation(item.loc),\n item,\n comma: false\n };\n\n value.items.push(inline_item);\n cursor.next();\n }\n\n if (\n cursor.done ||\n cursor.value!.type !== TokenType.Curly ||\n (cursor.value as Token).raw !== '}'\n ) {\n throw new ParseError(\n input,\n cursor.done ? value.loc.start : cursor.value!.loc.start,\n `Expected \"}\", found ${cursor.done ? 'end of file' : cursor.value!.raw}`\n );\n }\n\n value.loc.end = cursor.value!.loc.end;\n\n return value;\n}\n\nfunction inlineArray(cursor: Cursor, input: string): [InlineArray, Comment[]] {\n // 7. 
InlineArray\n if (cursor.value!.raw !== '[') {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Expected \"[\" for inline array, found ${cursor.value!.raw}`\n );\n }\n\n const value: InlineArray = {\n type: NodeType.InlineArray,\n loc: cloneLocation(cursor.value!.loc),\n items: []\n };\n let comments: Comment[] = [];\n\n cursor.next();\n\n while (\n !cursor.done &&\n !(cursor.value!.type === TokenType.Bracket && (cursor.value as Token).raw === ']')\n ) {\n if ((cursor.value as Token).type === TokenType.Comma) {\n const previous = value.items[value.items.length - 1];\n if (!previous) {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n 'Found \",\" without previous value for inline array'\n );\n }\n\n previous.comma = true;\n previous.loc.end = cursor.value!.loc.start;\n } else if ((cursor.value as Token).type === TokenType.Comment) {\n comments.push(comment(cursor));\n } else {\n const [item, ...additional_comments] = walkValue(cursor, input);\n const inline_item: InlineItem = {\n type: NodeType.InlineItem,\n loc: cloneLocation(item.loc),\n item,\n comma: false\n };\n\n value.items.push(inline_item);\n merge(comments, additional_comments as Comment[]);\n }\n\n cursor.next();\n }\n\n if (\n cursor.done ||\n cursor.value!.type !== TokenType.Bracket ||\n (cursor.value as Token).raw !== ']'\n ) {\n throw new ParseError(\n input,\n cursor.done ? value.loc.start : cursor.value!.loc.start,\n `Expected \"]\", found ${cursor.done ? 'end of file' : cursor.value!.raw}`\n );\n }\n\n value.loc.end = cursor.value!.loc.end;\n\n return [value, comments];\n}\n","import {\n NodeType,\n AST,\n Node,\n Document,\n Table,\n TableKey,\n TableArray,\n TableArrayKey,\n KeyValue,\n Key,\n String,\n Integer,\n Float,\n Boolean,\n DateTime,\n Comment,\n InlineArray,\n InlineTable,\n InlineItem\n} from './ast';\nimport { isIterable } from './utils';\n\nexport type Visit = (node: TNode, parent: TNode | null) => void;\nexport type EnterExit = { enter?: Visit; exit?: Visit };\n\nexport type Visitor = {\n Document?: Visit | EnterExit;\n Table?: Visit | EnterExit
;\n TableKey?: Visit | EnterExit;\n TableArray?: Visit | EnterExit;\n TableArrayKey?: Visit | EnterExit;\n KeyValue?: Visit | EnterExit;\n Key?: Visit | EnterExit;\n String?: Visit | EnterExit;\n Integer?: Visit | EnterExit;\n Float?: Visit | EnterExit;\n Boolean?: Visit | EnterExit;\n DateTime?: Visit | EnterExit;\n InlineArray?: Visit | EnterExit;\n InlineItem?: Visit | EnterExit;\n InlineTable?: Visit | EnterExit;\n Comment?: Visit | EnterExit;\n};\n\nexport default function traverse(ast: AST | Node, visitor: Visitor) {\n if (isIterable(ast)) {\n traverseArray(ast, null);\n } else {\n traverseNode(ast, null);\n }\n\n function traverseArray(array: Iterable, parent: Node | null) {\n for (const node of array) {\n traverseNode(node, parent);\n }\n }\n\n function traverseNode(node: Node, parent: Node | null) {\n const visit = visitor[node.type];\n\n if (visit && typeof visit === 'function') {\n (visit as Visit)(node, parent);\n }\n if (visit && (visit as EnterExit).enter) {\n (visit as EnterExit).enter!(node, parent);\n }\n\n switch (node.type) {\n case NodeType.Document:\n traverseArray((node as Document).items, node);\n break;\n\n case NodeType.Table:\n traverseNode((node as Table).key, node);\n traverseArray((node as Table).items, node);\n break;\n case NodeType.TableKey:\n traverseNode((node as TableKey).item, node);\n break;\n\n case NodeType.TableArray:\n traverseNode((node as TableArray).key, node);\n traverseArray((node as TableArray).items, node);\n break;\n case NodeType.TableArrayKey:\n traverseNode((node as TableArrayKey).item, node);\n break;\n\n case NodeType.KeyValue:\n traverseNode((node as KeyValue).key, node);\n traverseNode((node as KeyValue).value, node);\n break;\n\n case NodeType.InlineArray:\n traverseArray((node as InlineArray).items, node);\n break;\n case NodeType.InlineItem:\n traverseNode((node as InlineItem).item, node);\n break;\n\n case NodeType.InlineTable:\n traverseArray((node as InlineTable).items, node);\n break;\n\n case NodeType.Key:\n case NodeType.String:\n case NodeType.Integer:\n case NodeType.Float:\n case NodeType.Boolean:\n case NodeType.DateTime:\n case NodeType.Comment:\n break;\n\n default:\n throw new Error(`Unrecognized node type \"${node.type}\"`);\n }\n\n if (visit && (visit as EnterExit).exit) {\n (visit as EnterExit).exit!(node, parent);\n }\n }\n}\n","import {\n NodeType,\n Node,\n Document,\n Key,\n Value,\n InlineArray,\n InlineArrayItem,\n InlineTableItem,\n isKeyValue,\n isTable,\n isTableArray,\n isInlineTable,\n isInlineArray,\n hasItems,\n hasItem,\n isComment,\n isDocument,\n InlineTable,\n TableArray,\n Table,\n KeyValue,\n Comment,\n InlineItem,\n isInlineItem,\n Block,\n isBlock\n} from './ast';\nimport { Span, getSpan, clonePosition } from './location';\nimport { last } from './utils';\nimport traverse from './traverse';\n\nexport type Root = Document | Node;\n\n// Store line and column offsets per node\n//\n// Some offsets are applied on enter (e.g. shift child items and next items)\n// Others are applied on exit (e.g. 
shift next items)\ntype Offsets = WeakMap;\n\nconst enter_offsets: WeakMap = new WeakMap();\nconst getEnter = (root: Root) => {\n if (!enter_offsets.has(root)) {\n enter_offsets.set(root, new WeakMap());\n }\n return enter_offsets.get(root)!;\n};\n\nconst exit_offsets: WeakMap = new WeakMap();\nconst getExit = (root: Root) => {\n if (!exit_offsets.has(root)) {\n exit_offsets.set(root, new WeakMap());\n }\n return exit_offsets.get(root)!;\n};\n\nexport function replace(root: Root, parent: Node, existing: Node, replacement: Node) {\n // First, replace existing node\n // (by index for items, item, or key/value)\n if (hasItems(parent)) {\n const index = parent.items.indexOf(existing);\n if (index < 0) throw new Error(`Could not find existing item in parent node for replace`);\n\n parent.items.splice(index, 1, replacement);\n } else if (hasItem(parent)) {\n parent.item = replacement;\n } else if (isKeyValue(parent)) {\n if (parent.key === existing) {\n parent.key = replacement as Key;\n } else {\n parent.value = replacement as Value;\n }\n } else {\n throw new Error(`Unsupported parent type \"${parent.type}\" for replace`);\n }\n\n // Shift the replacement node into the same start position as existing\n const shift = {\n lines: existing.loc.start.line - replacement.loc.start.line,\n columns: existing.loc.start.column - replacement.loc.start.column\n };\n shiftNode(replacement, shift);\n\n // Apply offsets after replacement node\n const existing_span = getSpan(existing.loc);\n const replacement_span = getSpan(replacement.loc);\n const offset = {\n lines: replacement_span.lines - existing_span.lines,\n columns: replacement_span.columns - existing_span.columns\n };\n\n addOffset(offset, getExit(root), replacement, existing);\n}\n\nexport function insert(root: Root, parent: Node, child: Node, index?: number) {\n if (!hasItems(parent)) {\n throw new Error(`Unsupported parent type \"${(parent as Node).type}\" for insert`);\n }\n\n index = index != null ? 
index : parent.items.length;\n\n let shift: Span;\n let offset: Span;\n if (isInlineArray(parent) || isInlineTable(parent)) {\n ({ shift, offset } = insertInline(parent, child as InlineItem, index));\n } else {\n ({ shift, offset } = insertOnNewLine(\n parent as Document | Table | TableArray,\n child as KeyValue | Comment,\n index\n ));\n }\n\n shiftNode(child, shift);\n\n // The child element is placed relative to the previous element,\n // if the previous element has an offset, need to position relative to that\n // -> Move previous offset to child's offset\n const previous = parent.items[index - 1];\n const previous_offset = previous && getExit(root).get(previous);\n if (previous_offset) {\n offset.lines += previous_offset.lines;\n offset.columns += previous_offset.columns;\n\n // Account for comma overlay\n //\n // a = [b, e]\n // a = [b, c, e]\n // ^---^\n // a = [b, c, d, e]\n // ^---^\n if (isInlineItem(child) && previous && parent.items[index + 1]) {\n offset.columns -= 2;\n }\n\n getExit(root).delete(previous!);\n }\n\n const offsets = getExit(root);\n offsets.set(child, offset);\n}\n\nfunction insertOnNewLine(\n parent: Document | Table | TableArray,\n child: Block,\n index: number\n): { shift: Span; offset: Span } {\n if (!isBlock(child)) {\n throw new Error(`Incompatible child type \"${(child as Node).type}\"`);\n }\n\n const previous = parent.items[index - 1];\n const use_first_line = isDocument(parent) && !parent.items.length;\n\n parent.items.splice(index, 0, child);\n\n // Set start location from previous item or start of array\n // (previous is undefined for empty array or inserting at first item)\n const start = previous\n ? {\n line: previous.loc.end.line,\n column: !isComment(previous) ? previous.loc.start.column : parent.loc.start.column\n }\n : clonePosition(parent.loc.start);\n\n const is_block = isTable(child) || isTableArray(child);\n let leading_lines = 0;\n if (use_first_line) {\n // 0 leading lines\n } else if (is_block) {\n leading_lines = 2;\n } else {\n leading_lines = 1;\n }\n start.line += leading_lines;\n\n const shift = {\n lines: start.line - child.loc.start.line,\n columns: start.column - child.loc.start.column\n };\n\n // Apply offsets after child node\n const child_span = getSpan(child.loc);\n const offset = {\n lines: child_span.lines + (leading_lines - 1),\n columns: child_span.columns\n };\n\n return { shift, offset };\n}\n\nfunction insertInline(\n parent: InlineArray | InlineTable,\n child: InlineItem,\n index: number\n): { shift: Span; offset: Span } {\n if (!isInlineItem(child)) {\n throw new Error(`Incompatible child type \"${(child as Node).type}\"`);\n }\n\n // Store preceding node and insert\n const previous = index != null ? parent.items[index - 1] : last(parent.items);\n const is_last = index == null || index === parent.items.length;\n\n parent.items.splice(index, 0, child);\n\n // Add commas as-needed\n const leading_comma = !!previous;\n const trailing_comma = !is_last;\n const last_comma = is_last && child.comma === true;\n if (leading_comma) {\n previous!.comma = true;\n }\n if (trailing_comma) {\n child.comma = true;\n }\n\n // Use a new line for documents, children of Table/TableArray,\n // and if an inline table is using new lines\n const use_new_line = isInlineArray(parent) && perLine(parent);\n\n // Set start location from previous item or start of array\n // (previous is undefined for empty array or inserting at first item)\n const start = previous\n ? {\n line: previous.loc.end.line,\n column: use_new_line\n ? 
!isComment(previous)\n ? previous.loc.start.column\n : parent.loc.start.column\n : previous.loc.end.column\n }\n : clonePosition(parent.loc.start);\n\n let leading_lines = 0;\n if (use_new_line) {\n leading_lines = 1;\n } else {\n const skip_comma = 2;\n const skip_bracket = 1;\n start.column += leading_comma ? skip_comma : skip_bracket;\n }\n start.line += leading_lines;\n\n const shift = {\n lines: start.line - child.loc.start.line,\n columns: start.column - child.loc.start.column\n };\n\n // Apply offsets after child node\n const child_span = getSpan(child.loc);\n const offset = {\n lines: child_span.lines + (leading_lines - 1),\n columns: child_span.columns + (leading_comma || trailing_comma ? 2 : 0) + (last_comma ? 1 : 0)\n };\n\n return { shift, offset };\n}\n\nexport function remove(root: Root, parent: Node, node: Node) {\n // Remove an element from the parent's items\n // (supports Document, Table, TableArray, InlineTable, and InlineArray\n //\n // X\n // [ 1, 2, 3 ]\n // ^-^\n // -> Remove element 2 and apply 0,-3 offset to 1\n //\n // [table]\n // a = 1\n // b = 2 # X\n // c = 3\n // -> Remove element 2 and apply -1,0 offset to 1\n if (!hasItems(parent)) {\n throw new Error(`Unsupported parent type \"${parent.type}\" for remove`);\n }\n\n let index = parent.items.indexOf(node);\n if (index < 0) {\n // Try again, looking at child items for nodes like InlineArrayItem\n index = parent.items.findIndex(item => hasItem(item) && item.item === node);\n\n if (index < 0) {\n throw new Error('Could not find node in parent for removal');\n }\n\n node = parent.items[index];\n }\n\n const previous = parent.items[index - 1];\n let next = parent.items[index + 1];\n\n // Remove node\n parent.items.splice(index, 1);\n let removed_span = getSpan(node.loc);\n\n // Remove an associated comment that appears on the same line\n //\n // [table]\n // a = 1\n // b = 2 # remove this too\n // c = 3\n //\n // TODO InlineTable - this only applies to comments in Table/TableArray\n if (next && isComment(next) && next.loc.start.line === node.loc.end.line) {\n // Add comment to removed\n removed_span = getSpan({ start: node.loc.start, end: next.loc.end });\n\n // Shift to next item\n // (use same index since node has already been removed)\n next = parent.items[index + 1];\n\n // Remove comment\n parent.items.splice(index, 1);\n }\n\n // For inline tables and arrays, check whether the line should be kept\n const is_inline = previous && isInlineItem(previous);\n const previous_on_same_line = previous && previous.loc.end.line === node.loc.start.line;\n const next_on_sameLine = next && next.loc.start.line === node.loc.end.line;\n const keep_line = is_inline && (previous_on_same_line || next_on_sameLine);\n\n const offset = {\n lines: -(removed_span.lines - (keep_line ? 1 : 0)),\n columns: -removed_span.columns\n };\n\n // Offset for comma and remove comma from previous (if-needed)\n if (is_inline && previous_on_same_line) {\n offset.columns -= 2;\n }\n if (is_inline && previous && !next) {\n (previous as InlineArrayItem | InlineTableItem).comma = false;\n }\n\n // Apply offsets after preceding node or before children of parent node\n const target = previous || parent;\n const target_offsets = previous ? 
getExit(root) : getEnter(root);\n const node_offsets = getExit(root);\n const previous_offset = target_offsets.get(target);\n if (previous_offset) {\n offset.lines += previous_offset.lines;\n offset.columns += previous_offset.columns;\n }\n const removed_offset = node_offsets.get(node);\n if (removed_offset) {\n offset.lines += removed_offset.lines;\n offset.columns += removed_offset.columns;\n }\n\n target_offsets.set(target, offset);\n}\n\nexport function applyBracketSpacing(\n root: Root,\n node: InlineArray | InlineTable,\n bracket_spacing: boolean = true\n) {\n // Can only add bracket spacing currently\n if (!bracket_spacing) return;\n if (!node.items.length) return;\n\n // Apply enter to node so that items are affected\n addOffset({ lines: 0, columns: 1 }, getEnter(root), node);\n\n // Apply exit to last node in items\n const last_item = last(node.items as Node[])!;\n addOffset({ lines: 0, columns: 1 }, getExit(root), last_item);\n}\n\nexport function applyTrailingComma(\n root: Root,\n node: InlineArray | InlineTable,\n trailing_commas: boolean = false\n) {\n // Can only add trailing comma currently\n if (!trailing_commas) return;\n if (!node.items.length) return;\n\n const last_item = last(node.items)!;\n last_item.comma = true;\n\n addOffset({ lines: 0, columns: 1 }, getExit(root), last_item);\n}\n\nexport function applyWrites(root: Node) {\n const enter = getEnter(root);\n const exit = getExit(root);\n\n const offset: { lines: number; columns: { [index: number]: number } } = {\n lines: 0,\n columns: {}\n };\n\n function shiftStart(node: Node) {\n node.loc.start.line += offset.lines;\n node.loc.start.column += offset.columns[node.loc.start.line] || 0;\n\n const entering = enter.get(node);\n if (entering) {\n offset.lines += entering.lines;\n offset.columns[node.loc.start.line] =\n (offset.columns[node.loc.start.line] || 0) + entering.columns;\n }\n }\n function shiftEnd(node: Node) {\n node.loc.end.line += offset.lines;\n node.loc.end.column += offset.columns[node.loc.end.line] || 0;\n\n const exiting = exit.get(node);\n if (exiting) {\n offset.lines += exiting.lines;\n offset.columns[node.loc.end.line] =\n (offset.columns[node.loc.end.line] || 0) + exiting.columns;\n }\n }\n const shiftLocation = {\n enter: shiftStart,\n exit: shiftEnd\n };\n\n traverse(root, {\n [NodeType.Document]: shiftLocation,\n [NodeType.Table]: shiftLocation,\n [NodeType.TableArray]: shiftLocation,\n [NodeType.InlineTable]: shiftLocation,\n [NodeType.InlineArray]: shiftLocation,\n\n [NodeType.InlineItem]: shiftLocation,\n [NodeType.TableKey]: shiftLocation,\n [NodeType.TableArrayKey]: shiftLocation,\n\n [NodeType.KeyValue]: {\n enter(node) {\n const start_line = node.loc.start.line + offset.lines;\n const key_offset = exit.get(node.key);\n node.equals += (offset.columns[start_line] || 0) + (key_offset ? 
key_offset.columns : 0);\n\n shiftStart(node);\n },\n exit: shiftEnd\n },\n\n [NodeType.Key]: shiftLocation,\n [NodeType.String]: shiftLocation,\n [NodeType.Integer]: shiftLocation,\n [NodeType.Float]: shiftLocation,\n [NodeType.Boolean]: shiftLocation,\n [NodeType.DateTime]: shiftLocation,\n [NodeType.Comment]: shiftLocation\n });\n\n enter_offsets.delete(root);\n exit_offsets.delete(root);\n}\n\nexport function shiftNode(\n node: Node,\n span: Span,\n options: { first_line_only?: boolean } = {}\n): Node {\n const { first_line_only = false } = options;\n const start_line = node.loc.start.line;\n const { lines, columns } = span;\n const move = (node: Node) => {\n if (!first_line_only || node.loc.start.line === start_line) {\n node.loc.start.column += columns;\n node.loc.end.column += columns;\n }\n node.loc.start.line += lines;\n node.loc.end.line += lines;\n };\n\n traverse(node, {\n [NodeType.Table]: move,\n [NodeType.TableKey]: move,\n [NodeType.TableArray]: move,\n [NodeType.TableArrayKey]: move,\n [NodeType.KeyValue](node) {\n move(node);\n node.equals += columns;\n },\n [NodeType.Key]: move,\n [NodeType.String]: move,\n [NodeType.Integer]: move,\n [NodeType.Float]: move,\n [NodeType.Boolean]: move,\n [NodeType.DateTime]: move,\n [NodeType.InlineArray]: move,\n [NodeType.InlineItem]: move,\n [NodeType.InlineTable]: move,\n [NodeType.Comment]: move\n });\n\n return node;\n}\n\nfunction perLine(array: InlineArray): boolean {\n if (!array.items.length) return false;\n\n const span = getSpan(array.loc);\n return span.lines > array.items.length;\n}\n\nfunction addOffset(offset: Span, offsets: Offsets, node: Node, from?: Node) {\n const previous_offset = offsets.get(from || node);\n if (previous_offset) {\n offset.lines += previous_offset.lines;\n offset.columns += previous_offset.columns;\n }\n\n offsets.set(node, offset);\n}\n","import {\n NodeType,\n Document,\n Table,\n TableKey,\n TableArray,\n TableArrayKey,\n Value,\n KeyValue,\n Key,\n String,\n Integer,\n Float,\n Boolean,\n DateTime,\n InlineArray,\n InlineItem,\n InlineTable,\n Comment\n} from './ast';\nimport { zero, cloneLocation, clonePosition } from './location';\nimport { shiftNode } from './writer';\n\nexport function generateDocument(): Document {\n return {\n type: NodeType.Document,\n loc: { start: zero(), end: zero() },\n items: []\n };\n}\n\nexport function generateTable(key: string[]): Table {\n const table_key = generateTableKey(key);\n\n return {\n type: NodeType.Table,\n loc: cloneLocation(table_key.loc),\n key: table_key,\n items: []\n };\n}\n\nexport function generateTableKey(key: string[]): TableKey {\n const raw = keyValueToRaw(key);\n\n return {\n type: NodeType.TableKey,\n loc: {\n start: zero(),\n end: { line: 1, column: raw.length + 2 }\n },\n item: {\n type: NodeType.Key,\n loc: {\n start: { line: 1, column: 1 },\n end: { line: 1, column: raw.length + 1 }\n },\n value: key,\n raw\n }\n };\n}\n\nexport function generateTableArray(key: string[]): TableArray {\n const table_array_key = generateTableArrayKey(key);\n\n return {\n type: NodeType.TableArray,\n loc: cloneLocation(table_array_key.loc),\n key: table_array_key,\n items: []\n };\n}\n\nexport function generateTableArrayKey(key: string[]): TableArrayKey {\n const raw = keyValueToRaw(key);\n\n return {\n type: NodeType.TableArrayKey,\n loc: {\n start: zero(),\n end: { line: 1, column: raw.length + 4 }\n },\n item: {\n type: NodeType.Key,\n loc: {\n start: { line: 1, column: 2 },\n end: { line: 1, column: raw.length + 2 }\n },\n value: key,\n raw\n }\n 
};\n}\n\nexport function generateKeyValue(key: string[], value: Value): KeyValue {\n const key_node = generateKey(key);\n const { column } = key_node.loc.end;\n\n const equals = column + 1;\n\n shiftNode(\n value,\n { lines: 0, columns: column + 3 - value.loc.start.column },\n { first_line_only: true }\n );\n\n return {\n type: NodeType.KeyValue,\n loc: {\n start: clonePosition(key_node.loc.start),\n end: clonePosition(value.loc.end)\n },\n key: key_node,\n equals,\n value\n };\n}\n\nconst IS_BARE_KEY = /[\\w,\\d,\\_,\\-]+/;\nfunction keyValueToRaw(value: string[]): string {\n return value.map(part => (IS_BARE_KEY.test(part) ? part : JSON.stringify(part))).join('.');\n}\n\nexport function generateKey(value: string[]): Key {\n const raw = keyValueToRaw(value);\n\n return {\n type: NodeType.Key,\n loc: { start: zero(), end: { line: 1, column: raw.length } },\n raw,\n value\n };\n}\n\nexport function generateString(value: string): String {\n const raw = JSON.stringify(value);\n\n return {\n type: NodeType.String,\n loc: { start: zero(), end: { line: 1, column: raw.length } },\n raw,\n value\n };\n}\n\nexport function generateInteger(value: number): Integer {\n const raw = value.toString();\n\n return {\n type: NodeType.Integer,\n loc: { start: zero(), end: { line: 1, column: raw.length } },\n raw,\n value\n };\n}\n\nexport function generateFloat(value: number): Float {\n const raw = value.toString();\n\n return {\n type: NodeType.Float,\n loc: { start: zero(), end: { line: 1, column: raw.length } },\n raw,\n value\n };\n}\n\nexport function generateBoolean(value: boolean): Boolean {\n return {\n type: NodeType.Boolean,\n loc: { start: zero(), end: { line: 1, column: value ? 4 : 5 } },\n value\n };\n}\n\nexport function generateDateTime(value: Date): DateTime {\n const raw = value.toISOString();\n\n return {\n type: NodeType.DateTime,\n loc: { start: zero(), end: { line: 1, column: raw.length } },\n raw,\n value\n };\n}\n\nexport function generateInlineArray(): InlineArray {\n return {\n type: NodeType.InlineArray,\n loc: { start: zero(), end: { line: 1, column: 2 } },\n items: []\n };\n}\n\nexport function generateInlineItem(item: KeyValue | Value): InlineItem {\n return {\n type: NodeType.InlineItem,\n loc: cloneLocation(item.loc),\n item,\n comma: false\n };\n}\n\nexport function generateInlineTable(): InlineTable {\n return {\n type: NodeType.InlineTable,\n loc: { start: zero(), end: { line: 1, column: 2 } },\n items: []\n };\n}\n\nexport function generateComment(comment: string): Comment {\n if (!comment.startsWith('#')) comment = `# ${comment}`;\n\n return {\n type: NodeType.Comment,\n loc: { start: zero(), end: { line: 1, column: comment.length } },\n raw: comment\n };\n}\n","import {\n KeyValue,\n Table,\n InlineTable,\n TableArray,\n InlineArray,\n isInlineTable,\n isInlineArray,\n isKeyValue,\n Document\n} from './ast';\nimport { generateTable, generateDocument, generateTableArray } from './generate';\nimport { insert, remove, applyWrites, shiftNode } from './writer';\n\nexport interface Format {\n printWidth?: number;\n tabWidth?: number;\n useTabs?: boolean;\n trailingComma?: boolean;\n bracketSpacing?: boolean;\n}\n\nexport function formatTopLevel(document: Document): Document {\n const move_to_top_level = document.items.filter(item => {\n if (!isKeyValue(item)) return false;\n\n const is_inline_table = isInlineTable(item.value);\n const is_inline_array =\n isInlineArray(item.value) &&\n item.value.items.length &&\n isInlineTable(item.value.items[0].item);\n\n return 
is_inline_table || is_inline_array;\n }) as KeyValue[];\n\n move_to_top_level.forEach(node => {\n remove(document, document, node);\n\n if (isInlineTable(node.value)) {\n insert(document, document, formatTable(node));\n } else {\n formatTableArray(node).forEach(table_array => {\n insert(document, document, table_array);\n });\n }\n });\n\n applyWrites(document);\n return document;\n}\n\nfunction formatTable(key_value: KeyValue): Table {\n const table = generateTable(key_value.key.value);\n\n for (const item of (key_value.value as InlineTable).items) {\n insert(table, table, item.item);\n }\n\n applyWrites(table);\n return table;\n}\n\nfunction formatTableArray(key_value: KeyValue): TableArray[] {\n const root = generateDocument();\n\n for (const inline_array_item of (key_value.value as InlineArray).items) {\n const table_array = generateTableArray(key_value.key.value);\n insert(root, root, table_array);\n\n for (const inline_table_item of (inline_array_item.item as InlineTable).items) {\n insert(root, table_array, inline_table_item.item);\n }\n }\n\n applyWrites(root);\n return root.items as TableArray[];\n}\n\nexport function formatPrintWidth(document: Document, format: Format): Document {\n // TODO\n return document;\n}\n\nexport function formatEmptyLines(document: Document): Document {\n let shift = 0;\n let previous = 0;\n for (const item of document.items) {\n if (previous === 0 && item.loc.start.line > 1) {\n // Remove leading newlines\n shift = 1 - item.loc.start.line;\n } else if (item.loc.start.line + shift > previous + 2) {\n shift += previous + 2 - (item.loc.start.line + shift);\n }\n\n shiftNode(item, {\n lines: shift,\n columns: 0\n });\n previous = item.loc.end.line;\n }\n\n return document;\n}\n","import { Value, KeyValue, Document, InlineArray, InlineTable } from './ast';\nimport {\n generateDocument,\n generateKeyValue,\n generateInlineItem,\n generateString,\n generateInteger,\n generateFloat,\n generateBoolean,\n generateDateTime,\n generateInlineArray,\n generateInlineTable\n} from './generate';\nimport { Format, formatTopLevel, formatPrintWidth, formatEmptyLines } from './format';\nimport { isObject, isString, isInteger, isFloat, isBoolean, isDate, pipe } from './utils';\nimport { insert, applyWrites, applyBracketSpacing, applyTrailingComma } from './writer';\n\nconst default_format = {\n printWidth: 80,\n trailingComma: false,\n bracketSpacing: true\n};\n\nexport default function parseJS(value: any, format: Format = {}): Document {\n format = Object.assign({}, default_format, format);\n value = toJSON(value);\n\n const document = generateDocument();\n for (const item of walkObject(value, format)) {\n insert(document, document, item);\n }\n applyWrites(document);\n\n // Heuristics:\n // 1. Top-level objects/arrays should be tables/table arrays\n // 2. 
Convert objects/arrays to tables/table arrays based on print width\n const formatted = pipe(\n document,\n formatTopLevel,\n document => formatPrintWidth(document, format),\n formatEmptyLines\n );\n\n return formatted;\n}\n\nfunction* walkObject(object: any, format: Format): IterableIterator {\n for (const key of Object.keys(object)) {\n yield generateKeyValue([key], walkValue(object[key], format));\n }\n}\n\nfunction walkValue(value: any, format: Format): Value {\n if (value == null) {\n throw new Error('\"null\" and \"undefined\" values are not supported');\n }\n\n if (isString(value)) {\n return generateString(value);\n } else if (isInteger(value)) {\n return generateInteger(value);\n } else if (isFloat(value)) {\n return generateFloat(value);\n } else if (isBoolean(value)) {\n return generateBoolean(value);\n } else if (isDate(value)) {\n return generateDateTime(value);\n } else if (Array.isArray(value)) {\n return walkInlineArray(value, format);\n } else {\n return walkInlineTable(value, format);\n }\n}\n\nfunction walkInlineArray(value: Array, format: Format): InlineArray {\n const inline_array = generateInlineArray();\n for (const element of value) {\n const item = walkValue(element, format);\n const inline_array_item = generateInlineItem(item);\n\n insert(inline_array, inline_array, inline_array_item);\n }\n applyBracketSpacing(inline_array, inline_array, format.bracketSpacing);\n applyTrailingComma(inline_array, inline_array, format.trailingComma);\n applyWrites(inline_array);\n\n return inline_array;\n}\n\nfunction walkInlineTable(value: object, format: Format): InlineTable | Value {\n value = toJSON(value);\n if (!isObject(value)) return walkValue(value, format);\n\n const inline_table = generateInlineTable();\n const items = [...walkObject(value, format)];\n for (const item of items) {\n const inline_table_item = generateInlineItem(item);\n\n insert(inline_table, inline_table, inline_table_item);\n }\n applyBracketSpacing(inline_table, inline_table, format.bracketSpacing);\n applyTrailingComma(inline_table, inline_table, format.trailingComma);\n applyWrites(inline_table);\n\n return inline_table;\n}\n\nfunction toJSON(value: any): any {\n return value && !isDate(value) && typeof value.toJSON === 'function' ? 
value.toJSON() : value;\n}\n","import { NodeType, AST } from './ast';\nimport traverse from './traverse';\nimport { Location } from './location';\nimport { SPACE } from './tokenizer';\n\nconst BY_NEW_LINE = /(\\r\\n|\\n)/g;\n\nexport default function toTOML(ast: AST, newline: string = '\\n'): string {\n const lines: string[] = [];\n\n traverse(ast, {\n [NodeType.TableKey](node) {\n const { start, end } = node.loc;\n\n write(lines, { start, end: { line: start.line, column: start.column + 1 } }, '[');\n write(lines, { start: { line: end.line, column: end.column - 1 }, end }, ']');\n },\n [NodeType.TableArrayKey](node) {\n const { start, end } = node.loc;\n\n write(lines, { start, end: { line: start.line, column: start.column + 2 } }, '[[');\n write(lines, { start: { line: end.line, column: end.column - 2 }, end }, ']]');\n },\n\n [NodeType.KeyValue](node) {\n const {\n start: { line }\n } = node.loc;\n write(\n lines,\n { start: { line, column: node.equals }, end: { line, column: node.equals + 1 } },\n '='\n );\n },\n [NodeType.Key](node) {\n write(lines, node.loc, node.raw);\n },\n\n [NodeType.String](node) {\n write(lines, node.loc, node.raw);\n },\n [NodeType.Integer](node) {\n write(lines, node.loc, node.raw);\n },\n [NodeType.Float](node) {\n write(lines, node.loc, node.raw);\n },\n [NodeType.Boolean](node) {\n write(lines, node.loc, node.value.toString());\n },\n [NodeType.DateTime](node) {\n write(lines, node.loc, node.raw);\n },\n\n [NodeType.InlineArray](node) {\n const { start, end } = node.loc;\n write(lines, { start, end: { line: start.line, column: start.column + 1 } }, '[');\n write(lines, { start: { line: end.line, column: end.column - 1 }, end }, ']');\n },\n\n [NodeType.InlineTable](node) {\n const { start, end } = node.loc;\n write(lines, { start, end: { line: start.line, column: start.column + 1 } }, '{');\n write(lines, { start: { line: end.line, column: end.column - 1 }, end }, '}');\n },\n [NodeType.InlineItem](node) {\n if (!node.comma) return;\n\n const start = node.loc.end;\n write(lines, { start, end: { line: start.line, column: start.column + 1 } }, ',');\n },\n\n [NodeType.Comment](node) {\n write(lines, node.loc, node.raw);\n }\n });\n\n return lines.join(newline) + newline;\n}\n\nfunction write(lines: string[], loc: Location, raw: string) {\n const raw_lines = raw.split(BY_NEW_LINE);\n const expected_lines = loc.end.line - loc.start.line + 1;\n\n if (raw_lines.length !== expected_lines) {\n throw new Error(\n `Mismatch between location and raw string, expected ${expected_lines} lines for \"${raw}\"`\n );\n }\n\n for (let i = loc.start.line; i <= loc.end.line; i++) {\n const line = getLine(lines, i);\n const is_start_line = i === loc.start.line;\n const is_end_line = i === loc.end.line;\n\n const before = is_start_line\n ? line.substr(0, loc.start.column).padEnd(loc.start.column, SPACE)\n : '';\n const after = is_end_line ? 
line.substr(loc.end.column) : '';\n\n lines[i - 1] = before + raw_lines[i - loc.start.line] + after;\n }\n}\n\nfunction getLine(lines: string[], index: number): string {\n if (!lines[index - 1]) {\n for (let i = 0; i < index; i++) {\n if (!lines[i]) lines[i] = '';\n }\n }\n\n return lines[index - 1];\n}\n","import { Value, NodeType, Node, AST, isInlineTable } from './ast';\nimport traverse from './traverse';\nimport { last, blank, isDate, has } from './utils';\nimport ParseError from './parse-error';\n\nexport default function toJS(ast: AST, input: string = ''): any {\n const result = blank();\n const tables: Set = new Set();\n const table_arrays: Set = new Set();\n const defined: Set = new Set();\n let active: any = result;\n let previous_active: any;\n let skip = false;\n\n traverse(ast, {\n [NodeType.Table](node) {\n const key = node.key.item.value;\n try {\n validateKey(result, key, node.type, { tables, table_arrays, defined });\n } catch (err) {\n throw new ParseError(input, node.key.loc.start, err.message);\n }\n\n const joined_key = joinKey(key);\n tables.add(joined_key);\n defined.add(joined_key);\n\n active = ensureTable(result, key);\n },\n\n [NodeType.TableArray](node) {\n const key = node.key.item.value;\n\n try {\n validateKey(result, key, node.type, { tables, table_arrays, defined });\n } catch (err) {\n throw new ParseError(input, node.key.loc.start, err.message);\n }\n\n const joined_key = joinKey(key);\n table_arrays.add(joined_key);\n defined.add(joined_key);\n\n active = ensureTableArray(result, key);\n },\n\n [NodeType.KeyValue]: {\n enter(node) {\n if (skip) return;\n\n const key = node.key.value;\n try {\n validateKey(active, key, node.type, { tables, table_arrays, defined });\n } catch (err) {\n throw new ParseError(input, node.key.loc.start, err.message);\n }\n\n const value = toValue(node.value);\n const target = key.length > 1 ? ensureTable(active, key.slice(0, -1)) : active;\n\n target[last(key)!] = value;\n defined.add(joinKey(key));\n\n if (isInlineTable(node.value)) {\n previous_active = active;\n active = value;\n }\n },\n exit(node) {\n if (isInlineTable(node.value)) {\n active = previous_active;\n }\n }\n },\n\n [NodeType.InlineTable]: {\n enter() {\n // Handled by toValue\n skip = true;\n },\n exit() {\n skip = false;\n }\n }\n });\n\n return result;\n}\n\nexport function toValue(node: Value): any {\n switch (node.type) {\n case NodeType.InlineTable:\n const result = blank();\n\n node.items.forEach(({ item }) => {\n const key = item.key.value;\n const value = toValue(item.value);\n\n const target = key.length > 1 ? ensureTable(result, key.slice(0, -1)) : result;\n target[last(key)!] = value;\n });\n\n return result;\n\n case NodeType.InlineArray:\n return node.items.map(item => toValue(item.item as Value));\n\n case NodeType.String:\n case NodeType.Integer:\n case NodeType.Float:\n case NodeType.Boolean:\n case NodeType.DateTime:\n return node.value;\n\n default:\n throw new Error(`Unrecognized value type \"${(node as Node).type}\"`);\n }\n}\n\nfunction validateKey(\n object: any,\n key: string[],\n type: NodeType.Table | NodeType.TableArray | NodeType.KeyValue,\n state: { tables: Set; table_arrays: Set; defined: Set }\n) {\n // 1. 
Cannot override primitive value\n let parts: string[] = [];\n let index = 0;\n for (const part of key) {\n parts.push(part);\n\n if (!has(object, part)) return;\n if (isPrimitive(object[part])) {\n throw new Error(`Invalid key, a value has already been defined for ${parts.join('.')}`);\n }\n\n const joined_parts = joinKey(parts);\n if (Array.isArray(object[part]) && !state.table_arrays.has(joined_parts)) {\n throw new Error(`Invalid key, cannot add to a static array at ${joined_parts}`);\n }\n\n const next_is_last = index++ < key.length - 1;\n object = Array.isArray(object[part]) && next_is_last ? last(object[part]) : object[part];\n }\n\n const joined_key = joinKey(key);\n\n // 2. Cannot override table\n if (object && type === NodeType.Table && state.defined.has(joined_key)) {\n throw new Error(`Invalid key, a table has already been defined named ${joined_key}`);\n }\n\n // 3. Cannot add table array to static array or table\n if (object && type === NodeType.TableArray && !state.table_arrays.has(joined_key)) {\n throw new Error(`Invalid key, cannot add an array of tables to a table at ${joined_key}`);\n }\n}\n\nfunction ensureTable(object: any, key: string[]): any {\n const target = ensure(object, key.slice(0, -1));\n const last_key = last(key)!;\n if (!target[last_key]) {\n target[last_key] = blank();\n }\n\n return target[last_key];\n}\n\nfunction ensureTableArray(object: any, key: string[]): any {\n const target = ensure(object, key.slice(0, -1));\n const last_key = last(key)!;\n if (!target[last_key]) {\n target[last_key] = [];\n }\n\n const next = blank();\n target[last(key)!].push(next);\n\n return next;\n}\n\nfunction ensure(object: any, keys: string[]): any {\n return keys.reduce((active, subkey) => {\n if (!active[subkey]) {\n active[subkey] = blank();\n }\n return Array.isArray(active[subkey]) ? 
last(active[subkey]) : active[subkey];\n }, object);\n}\n\nfunction isPrimitive(value: any) {\n return typeof value !== 'object' && !isDate(value);\n}\n\nfunction joinKey(key: string[]): string {\n return key.join('.');\n}\n","import { isObject, datesEqual, stableStringify, merge } from './utils';\nimport { Path } from './find-by-path';\n\nexport enum ChangeType {\n Add = 'Add',\n Edit = 'Edit',\n Remove = 'Remove',\n Move = 'Move',\n Rename = 'Rename'\n}\n\nexport interface Add {\n type: ChangeType.Add;\n path: Path;\n}\nexport function isAdd(change: Change): change is Add {\n return change.type === ChangeType.Add;\n}\n\nexport interface Edit {\n type: ChangeType.Edit;\n path: Path;\n}\nexport function isEdit(change: Change): change is Edit {\n return change.type === ChangeType.Edit;\n}\n\nexport interface Remove {\n type: ChangeType.Remove;\n path: Path;\n}\nexport function isRemove(change: Change): change is Remove {\n return change.type === ChangeType.Remove;\n}\n\nexport interface Move {\n type: ChangeType.Move;\n path: Path;\n from: number;\n to: number;\n}\nexport function isMove(change: Change): change is Move {\n return change.type === ChangeType.Move;\n}\n\nexport interface Rename {\n type: ChangeType.Rename;\n path: Path;\n from: string;\n to: string;\n}\nexport function isRename(change: Change): change is Rename {\n return change.type === ChangeType.Rename;\n}\n\nexport type Change = Add | Edit | Remove | Move | Rename;\n\nexport default function diff(before: any, after: any, path: Path = []): Change[] {\n if (before === after || datesEqual(before, after)) {\n return [];\n }\n\n if (Array.isArray(before) && Array.isArray(after)) {\n return compareArrays(before, after, path);\n } else if (isObject(before) && isObject(after)) {\n return compareObjects(before, after, path);\n } else {\n return [\n {\n type: ChangeType.Edit,\n path\n }\n ];\n }\n}\n\nfunction compareObjects(before: any, after: any, path: Path = []): Change[] {\n let changes: Change[] = [];\n\n // 1. Get keys and stable values\n const before_keys = Object.keys(before);\n const before_stable = before_keys.map(key => stableStringify(before[key]));\n const after_keys = Object.keys(after);\n const after_stable = after_keys.map(key => stableStringify(after[key]));\n\n // Check for rename by seeing if object is in both before and after\n // and that key is no longer used in after\n const isRename = (stable: string, search: string[]) => {\n const index = search.indexOf(stable);\n if (index < 0) return false;\n\n const before_key = before_keys[before_stable.indexOf(stable)];\n return !after_keys.includes(before_key);\n };\n\n // 2. Check for changes, rename, and removed\n before_keys.forEach((key, index) => {\n const sub_path = path.concat(key);\n if (after_keys.includes(key)) {\n merge(changes, diff(before[key], after[key], sub_path));\n } else if (isRename(before_stable[index], after_stable)) {\n const to = after_keys[after_stable.indexOf(before_stable[index])];\n changes.push({\n type: ChangeType.Rename,\n path,\n from: key,\n to\n });\n } else {\n changes.push({\n type: ChangeType.Remove,\n path: sub_path\n });\n }\n });\n\n // 3. Check for additions\n after_keys.forEach((key, index) => {\n if (!before_keys.includes(key) && !isRename(after_stable[index], before_stable)) {\n changes.push({\n type: ChangeType.Add,\n path: path.concat(key)\n });\n }\n });\n\n return changes;\n}\n\nfunction compareArrays(before: any[], after: any[], path: Path = []): Change[] {\n let changes: Change[] = [];\n\n // 1. 
Convert arrays to stable objects\n const before_stable = before.map(stableStringify);\n const after_stable = after.map(stableStringify);\n\n // 2. Step through after array making changes to before array as-needed\n after_stable.forEach((value, index) => {\n const overflow = index >= before_stable.length;\n\n // Check if items are the same\n if (!overflow && before_stable[index] === value) {\n return;\n }\n\n // Check if item has been moved -> shift into place\n const from = before_stable.indexOf(value, index + 1);\n if (!overflow && from > -1) {\n changes.push({\n type: ChangeType.Move,\n path,\n from,\n to: index\n });\n\n const move = before_stable.splice(from, 1);\n before_stable.splice(index, 0, ...move);\n\n return;\n }\n\n // Check if item is removed -> assume it's been edited and replace\n const removed = !after_stable.includes(before_stable[index]);\n if (!overflow && removed) {\n merge(changes, diff(before[index], after[index], path.concat(index)));\n before_stable[index] = value;\n\n return;\n }\n\n // Add as new item and shift existing\n changes.push({\n type: ChangeType.Add,\n path: path.concat(index)\n });\n before_stable.splice(index, 0, value);\n });\n\n // 3. Remove any remaining overflow items\n for (let i = after_stable.length; i < before_stable.length; i++) {\n changes.push({\n type: ChangeType.Remove,\n path: path.concat(i)\n });\n }\n\n return changes;\n}\n","import { Node, isKeyValue, isTable, isTableArray, hasItems, isInlineItem, hasItem } from './ast';\nimport { arraysEqual, stableStringify } from './utils';\n\nexport type Path = Array;\n\nexport default function findByPath(node: Node, path: Path): Node {\n if (!path.length) return node;\n\n if (isKeyValue(node)) {\n return findByPath(node.value, path);\n }\n\n const indexes: { [key: string]: number } = {};\n let found;\n if (hasItems(node)) {\n node.items.some((item, index) => {\n try {\n let key: Path = [];\n if (isKeyValue(item)) {\n key = item.key.value;\n } else if (isTable(item)) {\n key = item.key.item.value;\n } else if (isTableArray(item)) {\n key = item.key.item.value;\n\n const key_string = stableStringify(key);\n if (!indexes[key_string]) {\n indexes[key_string] = 0;\n }\n const array_index = indexes[key_string]++;\n\n key = key.concat(array_index);\n } else if (isInlineItem(item) && isKeyValue(item.item)) {\n key = item.item.key.value;\n } else if (isInlineItem(item)) {\n key = [index];\n }\n\n if (key.length && arraysEqual(key, path.slice(0, key.length))) {\n found = findByPath(item, path.slice(key.length));\n return true;\n } else {\n return false;\n }\n } catch (err) {\n return false;\n }\n });\n }\n\n if (!found) {\n throw new Error(`Could not find node at path ${path.join('.')}`);\n }\n\n return found;\n}\n\nexport function tryFindByPath(node: Node, path: Path): Node | undefined {\n try {\n return findByPath(node, path);\n } catch (err) {}\n}\n\nexport function findParent(node: Node, path: Path): Node {\n let parent_path = path;\n let parent;\n while (parent_path.length && !parent) {\n parent_path = parent_path.slice(0, -1);\n parent = tryFindByPath(node, parent_path);\n }\n\n if (!parent) {\n throw new Error(`Count not find parent node for path ${path.join('.')}`);\n }\n\n return parent;\n}\n","import parseTOML from './parse-toml';\nimport parseJS from './parse-js';\nimport toJS from './to-js';\nimport toTOML from './to-toml';\nimport { Format } from './format';\nimport {\n isKeyValue,\n WithItems,\n KeyValue,\n isTable,\n Node,\n Document,\n isDocument,\n Block,\n NodeType,\n isTableArray,\n 
isInlineArray,\n hasItem,\n InlineItem\n} from './ast';\nimport diff, { Change, isAdd, isEdit, isRemove, isMove, isRename } from './diff';\nimport findByPath, { tryFindByPath, findParent } from './find-by-path';\nimport { last, isInteger } from './utils';\nimport { insert, replace, remove, applyWrites } from './writer';\n\nexport default function patch(existing: string, updated: any, format?: Format): string {\n const existing_ast = parseTOML(existing);\n const items = [...existing_ast];\n\n const existing_js = toJS(items);\n const existing_document: Document = {\n type: NodeType.Document,\n loc: { start: { line: 1, column: 0 }, end: { line: 1, column: 0 } },\n items\n };\n\n const updated_document = parseJS(updated, format);\n const changes = diff(existing_js, updated);\n\n const patched_document = applyChanges(existing_document, updated_document, changes);\n\n return toTOML(patched_document.items);\n}\n\nfunction applyChanges(original: Document, updated: Document, changes: Change[]): Document {\n // Potential Changes:\n //\n // Add: Add key-value to object, add item to array\n // Edit: Change in value\n // Remove: Remove key-value from object, remove item from array\n // Move: Move item in array\n // Rename: Rename key in key-value\n //\n // Special consideration, inline comments need to move as-needed\n\n changes.forEach(change => {\n if (isAdd(change)) {\n const child = findByPath(updated, change.path);\n const parent_path = change.path.slice(0, -1);\n let index = last(change.path)! as number;\n\n let is_table_array = isTableArray(child);\n if (isInteger(index) && !parent_path.some(isInteger)) {\n const sibling = tryFindByPath(original, parent_path.concat(0));\n if (sibling && isTableArray(sibling)) {\n is_table_array = true;\n }\n }\n\n let parent: Node;\n if (isTable(child)) {\n parent = original;\n } else if (is_table_array) {\n parent = original;\n\n // The index needs to be updated to top-level items\n // to properly account for other items, comments, and nesting\n const document = original as Document;\n const before = tryFindByPath(document, parent_path.concat(index - 1)) as Block | undefined;\n const after = tryFindByPath(document, parent_path.concat(index)) as Block | undefined;\n if (after) {\n index = document.items.indexOf(after);\n } else if (before) {\n index = document.items.indexOf(before) + 1;\n } else {\n index = document.items.length;\n }\n } else {\n parent = findParent(original, change.path);\n if (isKeyValue(parent)) parent = parent.value;\n }\n\n if (isTableArray(parent) || isInlineArray(parent) || isDocument(parent)) {\n insert(original, parent, child, index);\n } else {\n insert(original, parent, child);\n }\n } else if (isEdit(change)) {\n let existing = findByPath(original, change.path);\n let replacement = findByPath(updated, change.path);\n let parent;\n\n if (isKeyValue(existing) && isKeyValue(replacement)) {\n // Edit for key-value means value changes\n parent = existing;\n existing = existing.value;\n replacement = replacement.value;\n } else {\n parent = findParent(original, change.path);\n }\n\n replace(original, parent, existing, replacement);\n } else if (isRemove(change)) {\n let parent = findParent(original, change.path);\n if (isKeyValue(parent)) parent = parent.value;\n\n const node = findByPath(original, change.path);\n\n remove(original, parent, node);\n } else if (isMove(change)) {\n let parent = findByPath(original, change.path);\n if (hasItem(parent)) parent = parent.item;\n if (isKeyValue(parent)) parent = parent.value;\n\n const node = 
(parent as WithItems).items[change.from];\n\n remove(original, parent, node);\n insert(original, parent, node, change.to);\n } else if (isRename(change)) {\n let parent = findByPath(original, change.path.concat(change.from)) as\n | KeyValue\n | InlineItem;\n let replacement = findByPath(updated, change.path.concat(change.to)) as\n | KeyValue\n | InlineItem;\n\n if (hasItem(parent)) parent = parent.item;\n if (hasItem(replacement)) replacement = replacement.item;\n\n replace(original, parent, parent.key, replacement.key);\n }\n });\n\n applyWrites(original);\n return original;\n}\n","import parseTOML from './parse-toml';\nimport parseJS from './parse-js';\nimport toTOML from './to-toml';\nimport toJS from './to-js';\nimport { Format } from './format';\n\nexport function parse(value: string): any {\n return toJS(parseTOML(value), value);\n}\n\nexport function stringify(value: any, format?: Format): string {\n const document = parseJS(value, format);\n return toTOML(document.items);\n}\n\nexport { default as patch } from './patch';\n"],"names":["NodeType","TokenType","isDocument","node","type","Document","isTable","Table","isTableKey","TableKey","isTableArray","TableArray","isTableArrayKey","TableArrayKey","isKeyValue","KeyValue","isInlineArray","InlineArray","isInlineItem","InlineItem","isInlineTable","InlineTable","isComment","Comment","hasItems","hasItem","isBlock","iterator","value","Symbol","Cursor","[object Object]","this","index","undefined","done","peeked","result","next","getSpan","location","lines","end","line","start","columns","column","createLocate","input","findLines","findPosition","Array","isArray","findIndex","line_index","getLine","position","length","substr","BY_NEW_LINE","indexes","match","exec","push","clonePosition","cloneLocation","zero","ParseError","Error","message","error_message","pointer","whitespace","super","count","character","repeat","IS_WHITESPACE","IS_NEW_LINE","DOUBLE_QUOTE","SINGLE_QUOTE","SPACE","ESCAPE","IS_VALID_LEADING_CHARACTER","tokenize","cursor","locate","test","specialCharacter","Bracket","Curly","Equal","Comma","Dot","comment","multiline_char","checkThree","multiline","string","raw","loc","peek","quotes","Literal","double_quoted","single_quoted","isFinished","next_item","current","check","last","values","blank","Object","create","isString","isInteger","isFloat","isBoolean","isDate","prototype","toString","call","isObject","isIterable","has","object","key","hasOwnProperty","arraysEqual","a","b","i","datesEqual","toISOString","pipe","fns","reduce","fn","stableStringify","keys","sort","map","JSON","stringify","join","merge","target","original_length","added_length","TRIPLE_DOUBLE_QUOTE","TRIPLE_SINGLE_QUOTE","LF","CRLF","IS_CRLF","IS_LF","IS_LEADING_NEW_LINE","IS_LINE_ENDING_BACKSLASH","parseString","startsWith","trim","trimLeadingWhitespace","lineEndingBackslash","escapeNewLines","unescape","escaped","json_escaped","replace","code_point","parseInt","as_string","String","fromCodePoint","parse","TRUE","FALSE","HAS_E","IS_DIVIDER","IS_INF","IS_NAN","IS_HEX","IS_OCTAL","IS_BINARY","IS_FULL_DATE","IS_FULL_TIME","parseTOML","tokens","walkBlock","table","keyValue","walkValue","boolean","datetime","float","integer","inlineTable","inline_array","comments","inlineArray","is_table","item","Key","dot","before","after","items","equals","Boolean","Date","local_date","split","DateTime","Infinity","Number","Float","Integer","radix","previous","comma","inline_item","additional_comments","traverse","ast","visitor","traverseArray","array","parent","traverseNode","visit
","enter","exit","enter_offsets","WeakMap","getEnter","root","set","get","exit_offsets","getExit","existing","replacement","indexOf","splice","shiftNode","existing_span","replacement_span","addOffset","insert","child","shift","offset","insertInline","insertOnNewLine","previous_offset","delete","use_first_line","is_block","leading_lines","child_span","is_last","leading_comma","trailing_comma","last_comma","use_new_line","perLine","skip_comma","skip_bracket","remove","removed_span","is_inline","previous_on_same_line","next_on_sameLine","keep_line","target_offsets","node_offsets","removed_offset","applyBracketSpacing","bracket_spacing","last_item","applyTrailingComma","trailing_commas","applyWrites","shiftStart","entering","shiftEnd","exiting","shiftLocation","start_line","key_offset","span","options","first_line_only","move","offsets","from","generateDocument","generateTable","table_key","generateTableKey","keyValueToRaw","generateTableArray","table_array_key","generateTableArrayKey","generateKeyValue","key_node","generateKey","IS_BARE_KEY","part","generateString","generateInteger","generateFloat","generateBoolean","generateDateTime","generateInlineArray","generateInlineItem","generateInlineTable","formatTopLevel","document","filter","is_inline_table","is_inline_array","forEach","formatTable","formatTableArray","table_array","key_value","inline_array_item","inline_table_item","formatPrintWidth","format","formatEmptyLines","default_format","printWidth","trailingComma","bracketSpacing","parseJS","assign","toJSON","walkObject","walkInlineArray","walkInlineTable","element","inline_table","toTOML","newline","write","raw_lines","expected_lines","is_start_line","is_end_line","padEnd","toJS","tables","Set","table_arrays","defined","previous_active","active","skip","validateKey","err","joined_key","joinKey","add","ensureTable","ensureTableArray","toValue","slice","state","parts","isPrimitive","joined_parts","next_is_last","ensure","last_key","subkey","ChangeType","isAdd","change","Add","isEdit","Edit","isRemove","Remove","isMove","Move","isRename","Rename","diff","path","compareArrays","compareObjects","changes","before_keys","before_stable","after_keys","after_stable","stable","search","before_key","includes","sub_path","concat","to","overflow","removed","findByPath","found","some","key_string","array_index","tryFindByPath","findParent","parent_path","patch","updated","existing_js","applyChanges","original","is_table_array","sibling"],"mappings":"aAEA,IAAYA,SCEAC,mBD4BIC,WAAWC,GACzB,OAAOA,EAAKC,OAASJ,SAASK,kBAqBhBC,QAAQH,GACtB,OAAOA,EAAKC,OAASJ,SAASO,eAiBhBC,WAAWL,GACzB,OAAOA,EAAKC,OAASJ,SAASS,kBAqBhBC,aAAaP,GAC3B,OAAOA,EAAKC,OAASJ,SAASW,oBAgBhBC,gBAAgBT,GAC9B,OAAOA,EAAKC,OAASJ,SAASa,uBAmBhBC,WAAWX,GACzB,OAAOA,EAAKC,OAASJ,SAASe,kBAiGhBC,cAAcb,GAC5B,OAAOA,EAAKC,OAASJ,SAASiB,qBAiBhBC,aAAaf,GAC3B,OAAOA,EAAKC,OAASJ,SAASmB,oBAYhBC,cAAcjB,GAC5B,OAAOA,EAAKC,OAASJ,SAASqB,qBAyBhBC,UAAUnB,GACxB,OAAOA,EAAKC,OAASJ,SAASuB,iBAUhBC,SAASrB,GACvB,OACED,WAAWC,IACXG,QAAQH,IACRO,aAAaP,IACbiB,cAAcjB,IACda,cAAcb,YAOFsB,QAAQtB,GACtB,OAAOK,WAAWL,IAASS,gBAAgBT,IAASe,aAAaf,YAInDuB,QAAQvB,GACtB,OAAOW,WAAWX,IAASG,QAAQH,IAASO,aAAaP,IAASmB,UAAUnB,YE5T9DwB,SAAYC,GAC1B,OAAOA,EAAMC,OAAOF,mEFCtB,SAAY3B,GACVA,sBACAA,gBACAA,sBACAA,0BACAA,gCACAA,sBACAA,YACAA,kBACAA,oBACAA,gBACAA,oBACAA,sBACAA,4BACAA,0BACAA,4BACAA,oBAhBF,CAAYA,WAAAA,cEEZ,MAAqB8B,OAOnBC,YAAYJ,GACVK,KAAKL,SAAWA,EAChBK,KAAKC,OAAS,EACdD,KAAKJ,WAAQM,EACbF,KAAKG,MAAO,EACZH,KAAKI,OAAS,KAGhBL,OACE,GAAIC,KAAKG,KAAM,OAAOA,OAEtB,MAAME,EAASL,KAAKI,QAAUJ,KAAKL,SAASW,OAO5C,OALAN,KAAKC
,OAAS,EACdD,KAAKJ,MAAQS,EAAOT,MACpBI,KAAKG,KAAOE,EAAOF,KACnBH,KAAKI,OAAS,KAEPC,EAGTN,OACE,OAAIC,KAAKG,KAAaA,OAClBH,KAAKI,OAAeJ,KAAKI,QAE7BJ,KAAKI,OAASJ,KAAKL,SAASW,OACrBN,KAAKI,QAGdL,CAACF,OAAOF,YACN,OAAOK,MAIX,SAASG,OACP,MAAO,CAAEP,WAAOM,EAAWC,MAAM,YC9BnBI,QAAQC,GACtB,MAAO,CACLC,MAAOD,EAASE,IAAIC,KAAOH,EAASI,MAAMD,KAAO,EACjDE,QAASL,EAASE,IAAII,OAASN,EAASI,MAAME,iBAKlCC,aAAaC,GAC3B,MAAMP,EAAQQ,UAAUD,GAExB,MAAO,CAACJ,EAAeF,KACd,CACLE,MAAOM,aAAaT,EAAOG,GAC3BF,IAAKQ,aAAaT,EAAOC,cAKfQ,aAAaF,EAA0Bf,GAarD,MAAMQ,EAAQU,MAAMC,QAAQJ,GAASA,EAAQC,UAAUD,GACjDL,EAAOF,EAAMY,UAAUC,GAAcA,GAAcrB,GAAS,EAGlE,MAAO,CAAEU,KAAAA,EAAMG,OAFAb,GAASQ,EAAME,EAAO,GAAK,GAAK,aAKjCY,QAAQP,EAAeQ,GACrC,MAAMf,EAAQQ,UAAUD,GAClBJ,EAAQH,EAAMe,EAASb,KAAO,IAAM,EACpCD,EAAMD,EAAMe,EAASb,KAAO,IAAMK,EAAMS,OAE9C,OAAOT,EAAMU,OAAOd,EAAOF,EAAME,YAGnBK,UAAUD,GAExB,MAAMW,EAAc,aACdC,EAAoB,GAE1B,IAAIC,EACJ,KAA4C,OAApCA,EAAQF,EAAYG,KAAKd,KAC/BY,EAAQG,KAAKF,EAAM5B,OAIrB,OAFA2B,EAAQG,KAAKf,EAAMS,OAAS,GAErBG,WAGOI,cAAcR,GAC5B,MAAO,CAAEb,KAAMa,EAASb,KAAMG,OAAQU,EAASV,iBAGjCmB,cAAczB,GAC5B,MAAO,CAAEI,MAAOoB,cAAcxB,EAASI,OAAQF,IAAKsB,cAAcxB,EAASE,MAG7E,SAAgBwB,OACd,MAAO,CAAEvB,KAAM,EAAGG,OAAQ,SCpFPqB,mBAAmBC,MAItCrC,YAAYiB,EAAeQ,EAAoBa,GAC7C,IAAIC,yBAAuCd,EAASb,SAASa,EAASV,OAAS,QAE/E,GAAIE,EAAO,CACT,MAAML,EAAOY,QAAQP,EAAOQ,GACtBe,KAAaC,WAAWhB,EAASV,WAEnCH,IAAM2B,MAAoB3B,MAAS4B,OAIzCE,MAFAH,GAAiBD,GAIjBrC,KAAKW,KAAOa,EAASb,KACrBX,KAAKc,OAASU,EAASV,QAI3B,SAIS0B,WAAWE,EAAeC,EAAoB,KACrD,OAAOA,EAAUC,OAAOF,IHzB1B,SAAYzE,GACVA,oBACAA,gBACAA,gBACAA,gBACAA,YACAA,oBACAA,oBAPF,CAAYA,YAAAA,eAgBL,MAAM4E,cAAgB,KAChBC,YAAc,YACdC,aAAe,IACfC,aAAe,IACfC,MAAQ,IACRC,OAAS,KAEhBC,2BAA6B,kCAElBC,SAASpC,GACxB,MAAMqC,EAAS,IAAIvD,OAAOH,SAASqB,IACnCqC,EAAO/C,OAEP,MAAMgD,EAASvC,aAAaC,GAE5B,MAAQqC,EAAOlD,MAAM,CACnB,GAAI0C,cAAcU,KAAKF,EAAOzD,aAEvB,GAAqB,MAAjByD,EAAOzD,OAAkC,MAAjByD,EAAOzD,YAElC4D,iBAAiBH,EAAQC,EAAQrF,UAAUwF,cAC5C,GAAqB,MAAjBJ,EAAOzD,OAAkC,MAAjByD,EAAOzD,YAClC4D,iBAAiBH,EAAQC,EAAQrF,UAAUyF,YAC5C,GAAqB,MAAjBL,EAAOzD,YACV4D,iBAAiBH,EAAQC,EAAQrF,UAAU0F,YAC5C,GAAqB,MAAjBN,EAAOzD,YACV4D,iBAAiBH,EAAQC,EAAQrF,UAAU2F,YAC5C,GAAqB,MAAjBP,EAAOzD,YACV4D,iBAAiBH,EAAQC,EAAQrF,UAAU4F,UAC5C,GAAqB,MAAjBR,EAAOzD,YAEVkE,QAAQT,EAAQC,OACjB,CACL,MAAMS,EACJC,WAAWhD,EAAOqC,EAAOpD,MAAO+C,eAChCgB,WAAWhD,EAAOqC,EAAOpD,MAAO8C,cAE9BgB,QAEIE,UAAUZ,EAAQC,EAAQS,EAAgB/C,SAE1CkD,OAAOb,EAAQC,EAAQtC,GAIjCqC,EAAO/C,QAIX,SAASkD,iBAAiBH,EAAwBC,EAAiBlF,GACjE,MAAO,CAAEA,KAAAA,EAAM+F,IAAKd,EAAOzD,MAAQwE,IAAKd,EAAOD,EAAOpD,MAAOoD,EAAOpD,MAAQ,IAG9E,SAAS6D,QAAQT,EAAwBC,GACvC,MAAM1C,EAAQyC,EAAOpD,MACrB,IAAIkE,EAAMd,EAAOzD,MACjB,MAAQyD,EAAOgB,OAAOlE,OAAS2C,YAAYS,KAAKF,EAAOgB,OAAOzE,QAC5DyD,EAAO/C,OACP6D,GAAOd,EAAOzD,MAKhB,MAAO,CACLxB,KAAMH,UAAUsB,QAChB4E,IAAAA,EACAC,IAAKd,EAAO1C,EAAOyC,EAAOpD,MAAQ,IAItC,SAASgE,UACPZ,EACAC,EACAS,EACA/C,GAEA,MAAMJ,EAAQyC,EAAOpD,MACrB,IAAIqE,EAASP,EAAiBA,EAAiBA,EAC3CI,EAAMG,EAOV,IAJAjB,EAAO/C,OACP+C,EAAO/C,OACP+C,EAAO/C,QAEC+C,EAAOlD,OAAS6D,WAAWhD,EAAOqC,EAAOpD,MAAO8D,IACtDI,GAAOd,EAAOzD,MACdyD,EAAO/C,OAGT,GAAI+C,EAAOlD,KACT,MAAM,IAAIgC,WACRnB,EACAE,aAAaF,EAAOqC,EAAOpD,kDACgBqE,0BAS/C,OALAH,GAAOG,EAEPjB,EAAO/C,OACP+C,EAAO/C,OAEA,CACLlC,KAAMH,UAAUsG,QAChBJ,IAAAA,EACAC,IAAKd,EAAO1C,EAAOyC,EAAOpD,MAAQ,IAItC,SAASiE,OAAOb,EAAwBC,EAAiBtC,GAsBvD,IAAKmC,2BAA2BI,KAAKF,EAAOzD,OAC1C,MAAM,IAAIuC,WACRnB,EACAE,aAAaF,EAAOqC,EAAOpD,iCACDoD,EAAOzD,mDAIrC,MAAMgB,EAAQyC,EAAOpD,MACrB,IAAIkE,EAAMd,EAAOzD,MACb4E,EAAgBnB,EAAOzD,QAAUmD,aACjC0B,EAAgBpB,EAAOzD,QAAUoD,aAErC,MAAM0B,EAAcrB,IAClB,GAAIA,EAAOgB,OAAOlE,KAAM,OAAO,EAC/B,MAAMwE,EAAYtB,EAAOgB,OAAOzE,MAEhC,QACI4E,GAAiBC,KAClB5B,cAAcU,KAAKoB,IACJ,MAA
dA,GACc,MAAdA,GACc,MAAdA,GACc,MAAdA,GACc,MAAdA,IAIN,MAAQtB,EAAOlD,OAASuE,EAAWrB,KACjCA,EAAO/C,OAEH+C,EAAOzD,QAAUmD,eAAcyB,GAAiBA,GAChDnB,EAAOzD,QAAUoD,cAAiBwB,IAAeC,GAAiBA,GAEtEN,GAAOd,EAAOzD,OAEVyD,EAAOgB,OAAOlE,OARwB,CAS1C,IAAIwE,EAAYtB,EAAOgB,OAAOzE,MAI1B4E,GAAiBnB,EAAOzD,QAAUsD,SAChCyB,IAAc5B,cAChBoB,GAAOpB,aACPM,EAAO/C,QACEqE,IAAczB,SACvBiB,GAAOjB,OACPG,EAAO/C,SAKb,GAAIkE,GAAiBC,EACnB,MAAM,IAAItC,WACRnB,EACAE,aAAaF,EAAOJ,oCACa4D,EAAgBzB,aAAeC,gBAIpE,MAAO,CACL5E,KAAMH,UAAUsG,QAChBJ,IAAAA,EACAC,IAAKd,EAAO1C,EAAOyC,EAAOpD,MAAQ,IAItC,SAAS+D,WAAWhD,EAAe4D,EAAiBC,GAClD,OACE7D,EAAM4D,KAAaC,GACnB7D,EAAM4D,EAAU,KAAOC,GACvB7D,EAAM4D,EAAU,KAAOC,GACvBA,WIhOYC,KAAaC,GAC3B,OAAOA,EAAOA,EAAOtD,OAAS,GAKhC,SAAgBuD,QACd,OAAOC,OAAOC,OAAO,eAGPC,SAASvF,GACvB,MAAwB,iBAAVA,WAGAwF,UAAUxF,GACxB,MAAwB,iBAAVA,GAAsBA,EAAQ,GAAM,WAGpCyF,QAAQzF,GACtB,MAAwB,iBAAVA,IAAuBwF,UAAUxF,YAGjC0F,UAAU1F,GACxB,MAAwB,kBAAVA,WAGA2F,OAAO3F,GACrB,MAAiD,kBAA1CqF,OAAOO,UAAUC,SAASC,KAAK9F,YAGxB+F,SAAS/F,GACvB,OAAOA,GAA0B,iBAAVA,IAAuB2F,OAAO3F,KAAWuB,MAAMC,QAAQxB,YAGhEgG,WAAchG,GAC5B,OAAgB,MAATA,GAAmD,mBAA3BA,EAAMC,OAAOF,mBAG9BkG,IAAIC,EAAaC,GAC/B,OAAOd,OAAOO,UAAUQ,eAAeN,KAAKI,EAAQC,YAGtCE,YAAmBC,EAAYC,GAC7C,GAAID,EAAEzE,SAAW0E,EAAE1E,OAAQ,OAAO,EAElC,IAAK,IAAI2E,EAAI,EAAGA,EAAIF,EAAEzE,OAAQ2E,IAC5B,GAAIF,EAAEE,KAAOD,EAAEC,GAAI,OAAO,EAG5B,OAAO,WAGOC,WAAWH,EAAQC,GACjC,OAAOZ,OAAOW,IAAMX,OAAOY,IAAMD,EAAEI,gBAAkBH,EAAEG,uBAGzCC,KAAa3G,KAAkB4G,GAC7C,OAAOA,EAAIC,OAAO,CAAC7G,EAAO8G,IAAOA,EAAG9G,GAAQA,YAG9B+G,gBAAgBb,GAC9B,GAAIH,SAASG,GAAS,CAKpB,UAJmBb,OAAO2B,KAAKd,GAC5Be,OACAC,IAAIf,MAAUgB,KAAKC,UAAUjB,MAAQY,gBAAgBb,EAAOC,OAEzCkB,KAAK,QACtB,OAAI9F,MAAMC,QAAQ0E,OACZA,EAAOgB,IAAIH,iBAAiBM,KAAK,QAErCF,KAAKC,UAAUlB,YAIVoB,MAAcC,EAAkBpC,GAG9C,MAAMqC,EAAkBD,EAAO1F,OACzB4F,EAAetC,EAAOtD,OAC5B0F,EAAO1F,OAAS2F,EAAkBC,EAElC,IAAK,IAAIjB,EAAI,EAAGA,EAAIiB,EAAcjB,IAChCe,EAAOC,EAAkBhB,GAAKrB,EAAOqB,GC/EzC,MAAMkB,oBAAsB,MACtBC,oBAAsB,MACtBC,GAAK,MACLC,KAAO,SACPC,QAAU,QACVC,MAAQ,MACRC,oBAAsB,aACtBC,yBAA2B,6BAEjBC,YAAY3D,GAC1B,OAAIA,EAAI4D,WAAWR,qBACVhB,KACLyB,KAAK7D,EAAK,GACV8D,uBAEO9D,EAAI4D,WAAW/E,cACjBgF,KAAK7D,EAAK,GACRA,EAAI4D,WAAWT,qBACjBf,KACLyB,KAAK7D,EAAK,GACV8D,sBACAC,oBACAC,eACAC,UAEOjE,EAAI4D,WAAWhF,cACjBwD,KACLyB,KAAK7D,EAAK,GACViE,UAGKjE,WAIKiE,SAASC,GAGvB,MACMC,EAAeD,EAAQE,QADP,qBAC8B3I,IAClD,MAAM4I,EAAaC,SAAS7I,EAAM2I,QAAQ,MAAO,IAAK,IAChDG,EAAYC,OAAOC,cAAcJ,GAEvC,OAAOR,KAAKjB,KAAKC,UAAU0B,GAAY,KAGzC,OAAO3B,KAAK8B,UAAUP,MAOxB,SAASN,KAAKpI,EAAe8C,GAC3B,OAAO9C,EAAM8B,OAAOgB,EAAO9C,EAAM6B,OAAiB,EAARiB,GAG5C,SAASuF,sBAAsBrI,GAC7B,OAAOgI,oBAAoBrE,KAAK3D,GAASA,EAAM8B,OAAO,GAAK9B,EAG7D,SAASuI,eAAevI,GACtB,OAAOA,EAAM2I,QAAQb,QAASD,MAAMc,QAAQZ,MAAOH,IAGrD,SAASU,oBAAoBtI,GAC3B,OAAOA,EAAM2I,QAAQV,yBAA0B,ICzCjD,MAAMiB,KAAO,OACPC,MAAQ,QACRC,MAAQ,KACRC,WAAa,MACbC,OAAS,MACTC,OAAS,MACTC,OAAS,MACTC,SAAW,MACXC,UAAY,MACLC,aAAe,0BACfC,aAAe,mCAEHC,UAAUzI,GACjC,MAAM0I,EAAStG,SAASpC,GAClBqC,EAAS,IAAIvD,OAAO4J,GAE1B,MAAQrG,EAAO/C,OAAOH,YACbwJ,UAAUtG,EAAQrC,GAI7B,SAAU2I,UAAUtG,EAAuBrC,GACzC,GAAIqC,EAAOzD,MAAOxB,OAASH,UAAUsB,cAC7BuE,UAAQT,QACT,GAAIA,EAAOzD,MAAOxB,OAASH,UAAUwF,cACpCmG,MAAMvG,EAAQrC,OACf,CAAA,GAAIqC,EAAOzD,MAAOxB,OAASH,UAAUsG,QAG1C,MAAM,IAAIpC,WACRnB,EACAqC,EAAOzD,MAAOwE,IAAIxD,2BACGyC,EAAOzD,MAAOxB,qDAL9ByL,SAASxG,EAAQrC,IAU5B,SAAU8I,UAAUzG,EAAuBrC,GACzC,GAAIqC,EAAOzD,MAAOxB,OAASH,UAAUsG,QAC/BlB,EAAOzD,MAAOuE,IAAI,KAAOpB,cAAgBM,EAAOzD,MAAOuE,IAAI,KAAOnB,mBAC9DkB,SAAOb,GACJA,EAAOzD,MAAOuE,MAAQ2E,MAAQzF,EAAOzD,MAAOuE,MAAQ4E,YACvDgB,QAAQ1G,GACLkG,aAAahG,KAAKF,EAAOzD,MAAOuE,MAAQqF,aAAajG,KAAKF,EAAOzD,MAAOuE,WAC3E6F,SAAS3G,EAAQrC,IAErBqC,EAAOgB,OAAOlE,MAAQkD,E
AAOgB,OAAOzE,MAAOxB,OAASH,UAAU4F,KAChEqF,OAAO3F,KAAKF,EAAOzD,MAAOuE,MAC1BgF,OAAO5F,KAAKF,EAAOzD,MAAOuE,MACzB6E,MAAMzF,KAAKF,EAAOzD,MAAOuE,OAASiF,OAAO7F,KAAKF,EAAOzD,MAAOuE,WAEvD8F,MAAM5G,EAAQrC,SAEdkJ,QAAQ7G,QAEX,GAAIA,EAAOzD,MAAOxB,OAASH,UAAUyF,YACpCyG,YAAY9G,EAAQrC,OACrB,CAAA,GAAIqC,EAAOzD,MAAOxB,OAASH,UAAUwF,QAM1C,MAAM,IAAItB,WACRnB,EACAqC,EAAOzD,MAAOwE,IAAIxD,kCACUyC,EAAOzD,MAAOxB,6CATO,CACnD,MAAOgM,EAAcC,GAAYC,YAAYjH,EAAQrC,SAE/CoJ,QACCC,IAUX,SAASvG,UAAQT,GAGf,MAAO,CACLjF,KAAMJ,SAASuB,QACf6E,IAAKf,EAAOzD,MAAOwE,IACnBD,IAAKd,EAAOzD,MAAOuE,KAIvB,SAASyF,MAAMvG,EAAuBrC,GAgBpC,MAAM5C,EACHiF,EAAOgB,OAAOlE,MAAQkD,EAAOgB,OAAOzE,MAAOxB,OAASH,UAAUwF,QAE3DzF,SAASO,MADTP,SAASW,WAET4L,EAAWnM,IAASJ,SAASO,MAEnC,GAAIgM,GAAkC,MAAtBlH,EAAOzD,MAAOuE,IAC5B,MAAM,IAAIhC,WACRnB,EACAqC,EAAOzD,MAAOwE,IAAIxD,2CACmByC,EAAOzD,MAAOuE,OAGvD,IAAKoG,IAAmC,MAAtBlH,EAAOzD,MAAOuE,KAA4C,MAA7Bd,EAAOgB,OAAOzE,MAAOuE,KAClE,MAAM,IAAIhC,WACRnB,EACAqC,EAAOzD,MAAOwE,IAAIxD,sDAC8ByC,EAAOzD,MAAOuE,IAAMd,EAAOgB,OAAOzE,MAAOuE,OAK7F,MAAM4B,EAAMwE,EACP,CACCnM,KAAMJ,SAASS,SACf2F,IAAKf,EAAOzD,MAAOwE,KAEpB,CACChG,KAAMJ,SAASa,cACfuF,IAAKf,EAAOzD,MAAOwE,KAOzB,GAHAf,EAAO/C,OACHlC,IAASJ,SAASW,YAAY0E,EAAO/C,OAErC+C,EAAOlD,KACT,MAAM,IAAIgC,WAAWnB,EAAO+E,EAAI3B,IAAKxD,MAAO,2CAU9C,IAPAmF,EAAIyE,KAAO,CACTpM,KAAMJ,SAASyM,IACfrG,IAAKnC,cAAcoB,EAAOzD,MAAOwE,KACjCD,IAAKd,EAAOzD,MAAOuE,IACnBvE,MAAO,CAACkI,YAAYzE,EAAOzD,MAAOuE,QAG5Bd,EAAOgB,OAAOlE,MAAQkD,EAAOgB,OAAOzE,MAAOxB,OAASH,UAAU4F,KAAK,CACzER,EAAO/C,OACP,MAAMoK,EAAMrH,EAAOzD,MAEnByD,EAAO/C,OACP,MAAMqK,EAAS,IAAI/H,OAAO8H,EAAItG,IAAIxD,MAAME,OAASiF,EAAIyE,KAAKpG,IAAI1D,IAAII,QAC5D8J,EAAQ,IAAIhI,OAAOS,EAAOzD,MAAOwE,IAAIxD,MAAME,OAAS4J,EAAItG,IAAI1D,IAAII,QAEtEiF,EAAIyE,KAAKpG,IAAI1D,IAAM2C,EAAOzD,MAAOwE,IAAI1D,IACrCqF,EAAIyE,KAAKrG,QAAUwG,KAAUC,IAAQvH,EAAOzD,MAAOuE,MACnD4B,EAAIyE,KAAK5K,MAAMmC,KAAK+F,YAAYzE,EAAOzD,MAAOuE,MAKhD,GAFAd,EAAO/C,OAEHiK,IAAalH,EAAOlD,MAA8B,MAAtBkD,EAAOzD,MAAOuE,KAC5C,MAAM,IAAIhC,WACRnB,EACAqC,EAAOlD,KAAO4F,EAAIyE,KAAKpG,IAAI1D,IAAM2C,EAAOzD,MAAOwE,IAAIxD,2CACdyC,EAAOlD,KAAO,cAAgBkD,EAAOzD,MAAOuE,OAGrF,IACGoG,IACAlH,EAAOlD,MACNkD,EAAOgB,OAAOlE,MACQ,MAAtBkD,EAAOzD,MAAOuE,KACe,MAA7Bd,EAAOgB,OAAOzE,MAAOuE,KAEvB,MAAM,IAAIhC,WACRnB,EACAqC,EAAOlD,MAAQkD,EAAOgB,OAAOlE,KAAO4F,EAAIyE,KAAKpG,IAAI1D,IAAM2C,EAAOzD,MAAOwE,IAAIxD,sDAEvEyC,EAAOlD,MAAQkD,EAAOgB,OAAOlE,KACzB,cACAkD,EAAOzD,MAAOuE,IAAMd,EAAOgB,OAAOzE,MAAOuE,OAM9CoG,GAAUlH,EAAO/C,OACtByF,EAAI3B,IAAK1D,IAAM2C,EAAOzD,MAAOwE,IAAI1D,IAGjC,IAAImK,EAAmC,GACvC,MAAQxH,EAAOgB,OAAOlE,MAAQkD,EAAOgB,OAAOzE,MAAOxB,OAASH,UAAUwF,SACpEJ,EAAO/C,OACP4G,MAAM2D,EAAO,IAAIlB,UAAUtG,EAAQrC,KAGrC,MAAO,CACL5C,KAAMmM,EAAWvM,SAASO,MAAQP,SAASW,WAC3CyF,IAAK,CACHxD,MAAOoB,cAAc+D,EAAI3B,IAAKxD,OAC9BF,IACIsB,cADC6I,EAAMpJ,OACOoJ,EAAMA,EAAMpJ,OAAS,GAAG2C,IAAI1D,IAC5BqF,EAAI3B,IAAK1D,MAE7BqF,IAAKA,EACL8E,MAAAA,GAIJ,SAAShB,SAASxG,EAAuBrC,GAOvC,MAAM+E,EAAW,CACf3H,KAAMJ,SAASyM,IACfrG,IAAKnC,cAAcoB,EAAOzD,MAAOwE,KACjCD,IAAKd,EAAOzD,MAAOuE,IACnBvE,MAAO,CAACkI,YAAYzE,EAAOzD,MAAOuE,OAGpC,MAAQd,EAAOgB,OAAOlE,MAAQkD,EAAOgB,OAAOzE,MAAOxB,OAASH,UAAU4F,KACpER,EAAO/C,OACP+C,EAAO/C,OAEPyF,EAAI3B,IAAI1D,IAAM2C,EAAOzD,MAAOwE,IAAI1D,IAChCqF,EAAI5B,SAAWd,EAAOzD,MAAOuE,MAC7B4B,EAAInG,MAAMmC,KAAK+F,YAAYzE,EAAOzD,MAAOuE,MAK3C,GAFAd,EAAO/C,OAEH+C,EAAOlD,MAAQkD,EAAOzD,MAAOxB,OAASH,UAAU0F,MAClD,MAAM,IAAIxB,WACRnB,EACAqC,EAAOlD,KAAO4F,EAAI3B,IAAI1D,IAAM2C,EAAOzD,MAAOwE,IAAIxD,2CACTyC,EAAOlD,KAAO,cAAgBkD,EAAOzD,MAAOuE,OAIrF,MAAM2G,EAASzH,EAAOzD,MAAOwE,IAAIxD,MAAME,OAIvC,GAFAuC,EAAO/C,OAEH+C,EAAOlD,KACT,MAAM,IAAIgC,WAAWnB,EAAO+E,EAAI3B,IAAIxD,MAAO,qDAG7C,MAAOhB,KAAUyK,GAAYP,UAAUzG,EAA
QrC,GAE/C,MAAO,CACL,CACE5C,KAAMJ,SAASe,SACfgH,IAAAA,EACAnG,MAAOA,EACPwE,IAAK,CACHxD,MAAOoB,cAAc+D,EAAI3B,IAAIxD,OAC7BF,IAAKsB,cAAcpC,EAAMwE,IAAI1D,MAE/BoK,OAAAA,MAEET,GAIR,SAASnG,SAAOb,GACd,MAAO,CACLjF,KAAMJ,SAAS2K,OACfvE,IAAKf,EAAOzD,MAAOwE,IACnBD,IAAKd,EAAOzD,MAAOuE,IACnBvE,MAAOkI,YAAYzE,EAAOzD,MAAOuE,MAIrC,SAAS4F,QAAQ1G,GACf,MAAO,CACLjF,KAAMJ,SAAS+M,QACf3G,IAAKf,EAAOzD,MAAOwE,IACnBxE,MAAOyD,EAAOzD,MAAOuE,MAAQ2E,MAIjC,SAASkB,SAAS3G,EAAuBrC,GAmBvC,IAEIpB,EAFAwE,EAAMf,EAAOzD,MAAOwE,IACpBD,EAAMd,EAAOzD,MAAOuE,IAKxB,IACGd,EAAOgB,OAAOlE,MACfkD,EAAOgB,OAAOzE,MAAOxB,OAASH,UAAUsG,SACxCgF,aAAahG,KAAKY,IAClBqF,aAAajG,KAAKF,EAAOgB,OAAOzE,MAAOuE,KACvC,CACA,MAAMvD,EAAQwD,EAAIxD,MAElByC,EAAO/C,OACP8D,EAAM,CAAExD,MAAAA,EAAOF,IAAK2C,EAAOzD,MAAOwE,IAAI1D,KACtCyD,OAAWd,EAAOzD,MAAOuE,MAG3B,IAAKd,EAAOgB,OAAOlE,MAAQkD,EAAOgB,OAAOzE,MAAOxB,OAASH,UAAU4F,IAAK,CACtE,MAAMjD,EAAQwD,EAAIxD,MAIlB,GAFAyC,EAAO/C,OAEH+C,EAAOgB,OAAOlE,MAAQkD,EAAOgB,OAAOzE,MAAOxB,OAASH,UAAUsG,QAChE,MAAM,IAAIpC,WAAWnB,EAAOqC,EAAOzD,MAAOwE,IAAI1D,IAAK,0CAErD2C,EAAO/C,OAEP8D,EAAM,CAAExD,MAAAA,EAAOF,IAAK2C,EAAOzD,MAAOwE,IAAI1D,KACtCyD,OAAWd,EAAOzD,MAAOuE,MAG3B,GAAKoF,aAAahG,KAAKY,GAKrBvE,EAAQ,IAAIoL,KAAK7G,EAAIoE,QAAQ,IAAK,UALP,CAE3B,MAAO0C,IAAc,IAAID,MAAO1E,cAAc4E,MAAM,KACpDtL,EAAQ,IAAIoL,QAAQC,KAAc9G,KAKpC,MAAO,CACL/F,KAAMJ,SAASmN,SACf/G,IAAAA,EACAD,IAAAA,EACAvE,MAAAA,GAIJ,SAASqK,MAAM5G,EAAuBrC,GACpC,IAEIpB,EAFAwE,EAAMf,EAAOzD,MAAOwE,IACpBD,EAAMd,EAAOzD,MAAOuE,IAGxB,GAAI+E,OAAO3F,KAAKY,GACdvE,EAAgB,SAARuE,GAAkBiH,EAAAA,EAAWA,EAAAA,OAChC,GAAIjC,OAAO5F,KAAKY,GACrBvE,EAAyB,SACpB,GAAKyD,EAAOgB,OAAOlE,MAAQkD,EAAOgB,OAAOzE,MAAOxB,OAASH,UAAU4F,IAmBxEjE,EAAQyL,OAAOlH,EAAIoE,QAAQU,WAAY,SAnBsC,CAC7E,MAAMrI,EAAQwD,EAAIxD,MASlB,GAFAyC,EAAO/C,OAEH+C,EAAOgB,OAAOlE,MAAQkD,EAAOgB,OAAOzE,MAAOxB,OAASH,UAAUsG,QAChE,MAAM,IAAIpC,WAAWnB,EAAOqC,EAAOzD,MAAOwE,IAAI1D,IAAK,qCAErD2C,EAAO/C,OAEP6D,OAAWd,EAAOzD,MAAOuE,MACzBC,EAAM,CAAExD,MAAAA,EAAOF,IAAK2C,EAAOzD,MAAOwE,IAAI1D,KACtCd,EAAQyL,OAAOlH,EAAIoE,QAAQU,WAAY,KAKzC,MAAO,CAAE7K,KAAMJ,SAASsN,MAAOlH,IAAAA,EAAKD,IAAAA,EAAKvE,MAAAA,GAG3C,SAASsK,QAAQ7G,GAEf,GAA0B,OAAtBA,EAAOzD,MAAOuE,KAAsC,OAAtBd,EAAOzD,MAAOuE,IAC9C,MAAO,CACL/F,KAAMJ,SAASuN,QACfnH,IAAKf,EAAOzD,MAAOwE,IACnBD,IAAKd,EAAOzD,MAAOuE,IACnBvE,MAAO,GAIX,IAAI4L,EAAQ,GACRpC,OAAO7F,KAAKF,EAAOzD,MAAOuE,KAC5BqH,EAAQ,GACCnC,SAAS9F,KAAKF,EAAOzD,MAAOuE,KACrCqH,EAAQ,EACClC,UAAU/F,KAAKF,EAAOzD,MAAOuE,OACtCqH,EAAQ,GAGV,MAAM5L,EAAQ6I,SACZpF,EACGzD,MAAOuE,IAAIoE,QAAQU,WAAY,IAC/BV,QAAQc,SAAU,IAClBd,QAAQe,UAAW,IACtBkC,GAGF,MAAO,CACLpN,KAAMJ,SAASuN,QACfnH,IAAKf,EAAOzD,MAAOwE,IACnBD,IAAKd,EAAOzD,MAAOuE,IACnBvE,MAAAA,GAIJ,SAASuK,YAAY9G,EAAuBrC,GAC1C,GAA0B,MAAtBqC,EAAOzD,MAAOuE,IAChB,MAAM,IAAIhC,WACRnB,EACAqC,EAAOzD,MAAOwE,IAAIxD,8CACsByC,EAAOzD,MAAOuE,OAK1D,MAAMvE,EAAqB,CACzBxB,KAAMJ,SAASqB,YACf+E,IAAKnC,cAAcoB,EAAOzD,MAAOwE,KACjCyG,MAAO,IAKT,IAFAxH,EAAO/C,QAGJ+C,EAAOlD,OACNkD,EAAOzD,MAAOxB,OAASH,UAAUyF,OAAyC,MAA/BL,EAAOzD,MAAgBuE,MACpE,CACA,GAAKd,EAAOzD,MAAgBxB,OAASH,UAAU2F,MAAO,CACpD,MAAM6H,EAAW7L,EAAMiL,MAAMjL,EAAMiL,MAAMpJ,OAAS,GAClD,IAAKgK,EACH,MAAM,IAAItJ,WACRnB,EACAqC,EAAOzD,MAAOwE,IAAIxD,MAClB,oDAIJ6K,EAASC,OAAQ,EACjBD,EAASrH,IAAI1D,IAAM2C,EAAOzD,MAAOwE,IAAIxD,MAErCyC,EAAO/C,OACP,SAGF,MAAOkK,GAAQb,UAAUtG,EAAQrC,GACjC,GAAIwJ,EAAKpM,OAASJ,SAASe,SACzB,MAAM,IAAIoD,WACRnB,EACAqC,EAAOzD,MAAOwE,IAAIxD,+DACuC4J,EAAKpM,QAIlE,MAAMuN,EAAoC,CACxCvN,KAAMJ,SAASmB,WACfiF,IAAKnC,cAAcuI,EAAKpG,KACxBoG,KAAAA,EACAkB,OAAO,GAGT9L,EAAMiL,MAAM9I,KAAK4J,GACjBtI,EAAO/C,OAGT,GACE+C,EAAOlD,MACPkD,EAAOzD,MAAOxB,OAASH,UAAUyF,OACD,MAA/BL,EAAOzD,MAAgBuE,IAExB,MAAM,IAAIhC,WACRnB,EACAqC,EAAOlD,KAAOP,EAAM
wE,IAAIxD,MAAQyC,EAAOzD,MAAOwE,IAAIxD,6BAC3ByC,EAAOlD,KAAO,cAAgBkD,EAAOzD,MAAOuE,OAMvE,OAFAvE,EAAMwE,IAAI1D,IAAM2C,EAAOzD,MAAOwE,IAAI1D,IAE3Bd,EAGT,SAAS0K,YAAYjH,EAAuBrC,GAE1C,GAA0B,MAAtBqC,EAAOzD,MAAOuE,IAChB,MAAM,IAAIhC,WACRnB,EACAqC,EAAOzD,MAAOwE,IAAIxD,8CACsByC,EAAOzD,MAAOuE,OAI1D,MAAMvE,EAAqB,CACzBxB,KAAMJ,SAASiB,YACfmF,IAAKnC,cAAcoB,EAAOzD,MAAOwE,KACjCyG,MAAO,IAET,IAAIR,EAAsB,GAI1B,IAFAhH,EAAO/C,QAGJ+C,EAAOlD,OACNkD,EAAOzD,MAAOxB,OAASH,UAAUwF,SAA2C,MAA/BJ,EAAOzD,MAAgBuE,MACtE,CACA,GAAKd,EAAOzD,MAAgBxB,OAASH,UAAU2F,MAAO,CACpD,MAAM6H,EAAW7L,EAAMiL,MAAMjL,EAAMiL,MAAMpJ,OAAS,GAClD,IAAKgK,EACH,MAAM,IAAItJ,WACRnB,EACAqC,EAAOzD,MAAOwE,IAAIxD,MAClB,qDAIJ6K,EAASC,OAAQ,EACjBD,EAASrH,IAAI1D,IAAM2C,EAAOzD,MAAOwE,IAAIxD,WAChC,GAAKyC,EAAOzD,MAAgBxB,OAASH,UAAUsB,QACpD8K,EAAStI,KAAK+B,UAAQT,QACjB,CACL,MAAOmH,KAASoB,GAAuB9B,UAAUzG,EAAQrC,GACnD2K,EAA0B,CAC9BvN,KAAMJ,SAASmB,WACfiF,IAAKnC,cAAcuI,EAAKpG,KACxBoG,KAAAA,EACAkB,OAAO,GAGT9L,EAAMiL,MAAM9I,KAAK4J,GACjBzE,MAAMmD,EAAUuB,GAGlBvI,EAAO/C,OAGT,GACE+C,EAAOlD,MACPkD,EAAOzD,MAAOxB,OAASH,UAAUwF,SACD,MAA/BJ,EAAOzD,MAAgBuE,IAExB,MAAM,IAAIhC,WACRnB,EACAqC,EAAOlD,KAAOP,EAAMwE,IAAIxD,MAAQyC,EAAOzD,MAAOwE,IAAIxD,6BAC3ByC,EAAOlD,KAAO,cAAgBkD,EAAOzD,MAAOuE,OAMvE,OAFAvE,EAAMwE,IAAI1D,IAAM2C,EAAOzD,MAAOwE,IAAI1D,IAE3B,CAACd,EAAOyK,YCniBOwB,SAASC,EAAiBC,GAOhD,SAASC,EAAcC,EAAuBC,GAC5C,IAAK,MAAM/N,KAAQ8N,EACjBE,EAAahO,EAAM+N,GAIvB,SAASC,EAAahO,EAAY+N,GAChC,MAAME,EAAQL,EAAQ5N,EAAKC,MAS3B,OAPIgO,GAA0B,mBAAVA,GACjBA,EAAgBjO,EAAM+N,GAErBE,GAAUA,EAAoBC,OAC/BD,EAAoBC,MAAOlO,EAAM+N,GAG5B/N,EAAKC,MACX,KAAKJ,SAASK,SACZ2N,EAAe7N,EAAkB0M,MAAO1M,GACxC,MAEF,KAAKH,SAASO,MACZ4N,EAAchO,EAAe4H,IAAK5H,GAClC6N,EAAe7N,EAAe0M,MAAO1M,GACrC,MACF,KAAKH,SAASS,SACZ0N,EAAchO,EAAkBqM,KAAMrM,GACtC,MAEF,KAAKH,SAASW,WACZwN,EAAchO,EAAoB4H,IAAK5H,GACvC6N,EAAe7N,EAAoB0M,MAAO1M,GAC1C,MACF,KAAKH,SAASa,cACZsN,EAAchO,EAAuBqM,KAAMrM,GAC3C,MAEF,KAAKH,SAASe,SACZoN,EAAchO,EAAkB4H,IAAK5H,GACrCgO,EAAchO,EAAkByB,MAAOzB,GACvC,MAEF,KAAKH,SAASiB,YACZ+M,EAAe7N,EAAqB0M,MAAO1M,GAC3C,MACF,KAAKH,SAASmB,WACZgN,EAAchO,EAAoBqM,KAAMrM,GACxC,MAEF,KAAKH,SAASqB,YACZ2M,EAAe7N,EAAqB0M,MAAO1M,GAC3C,MAEF,KAAKH,SAASyM,IACd,KAAKzM,SAAS2K,OACd,KAAK3K,SAASuN,QACd,KAAKvN,SAASsN,MACd,KAAKtN,SAAS+M,QACd,KAAK/M,SAASmN,SACd,KAAKnN,SAASuB,QACZ,MAEF,QACE,MAAM,IAAI6C,iCAAiCjE,EAAKC,SAGhDgO,GAAUA,EAAoBE,MAC/BF,EAAoBE,KAAMnO,EAAM+N,GAzEjCtG,WAAWkG,GACbE,EAAcF,EAAK,MAEnBK,EAAaL,EAAK,MCTtB,MAAMS,cAAwC,IAAIC,QAC5CC,SAAYC,IACXH,cAAc1G,IAAI6G,IACrBH,cAAcI,IAAID,EAAM,IAAIF,SAEvBD,cAAcK,IAAIF,IAGrBG,aAAuC,IAAIL,QAC3CM,QAAWJ,IACVG,aAAahH,IAAI6G,IACpBG,aAAaF,IAAID,EAAM,IAAIF,SAEtBK,aAAaD,IAAIF,IAG1B,SAAgBnE,QAAQmE,EAAYR,EAAca,EAAgBC,GAGhE,GAAIxN,SAAS0M,GAAS,CACpB,MAAMjM,EAAQiM,EAAOrB,MAAMoC,QAAQF,GACnC,GAAI9M,EAAQ,EAAG,MAAM,IAAImC,MAAM,2DAE/B8J,EAAOrB,MAAMqC,OAAOjN,EAAO,EAAG+M,QACzB,GAAIvN,QAAQyM,GACjBA,EAAO1B,KAAOwC,MACT,CAAA,IAAIlO,WAAWoN,GAOpB,MAAM,IAAI9J,kCAAkC8J,EAAO9N,qBAN/C8N,EAAOnG,MAAQgH,EACjBb,EAAOnG,IAAMiH,EAEbd,EAAOtM,MAAQoN,EAWnBG,UAAUH,EAJI,CACZvM,MAAOsM,EAAS3I,IAAIxD,MAAMD,KAAOqM,EAAY5I,IAAIxD,MAAMD,KACvDE,QAASkM,EAAS3I,IAAIxD,MAAME,OAASkM,EAAY5I,IAAIxD,MAAME,SAK7D,MAAMsM,EAAgB7M,QAAQwM,EAAS3I,KACjCiJ,EAAmB9M,QAAQyM,EAAY5I,KAM7CkJ,UALe,CACb7M,MAAO4M,EAAiB5M,MAAQ2M,EAAc3M,MAC9CI,QAASwM,EAAiBxM,QAAUuM,EAAcvM,SAGlCiM,QAAQJ,GAAOM,EAAaD,GAGhD,SAAgBQ,OAAOb,EAAYR,EAAcsB,EAAavN,GAC5D,IAAKT,SAAS0M,GACZ,MAAM,IAAI9J,kCAAmC8J,EAAgB9N,oBAK/D,IAAIqP,EACAC,EAHJzN,EAAiB,MAATA,EAAgBA,EAAQiM,EAAOrB,MAAMpJ,OAIzCzC,cAAckN,IAAW9M,cAAc8M,KACtCuB,MAAAA,EAAOC,OAAAA,GAAWC,aAAazB,EAAQsB,EAAqBvN,MAE5DwN,MAAAA,EAAOC,OAAAA,GAAWE,gBACnB1B,EACAsB,EACAvN,IAIJkN,UAAUK,EA
AOC,GAKjB,MAAMhC,EAAWS,EAAOrB,MAAM5K,EAAQ,GAChC4N,EAAkBpC,GAAYqB,QAAQJ,GAAME,IAAInB,GAClDoC,IACFH,EAAOjN,OAASoN,EAAgBpN,MAChCiN,EAAO7M,SAAWgN,EAAgBhN,QAS9B3B,aAAasO,IAAU/B,GAAYS,EAAOrB,MAAM5K,EAAQ,KAC1DyN,EAAO7M,SAAW,GAGpBiM,QAAQJ,GAAMoB,OAAOrC,IAGPqB,QAAQJ,GAChBC,IAAIa,EAAOE,GAGrB,SAASE,gBACP1B,EACAsB,EACAvN,GAEA,IAAKP,QAAQ8N,GACX,MAAM,IAAIpL,kCAAmCoL,EAAepP,SAG9D,MAAMqN,EAAWS,EAAOrB,MAAM5K,EAAQ,GAChC8N,EAAiB7P,WAAWgO,KAAYA,EAAOrB,MAAMpJ,OAE3DyK,EAAOrB,MAAMqC,OAAOjN,EAAO,EAAGuN,GAI9B,MAAM5M,EAAQ6K,EACV,CACE9K,KAAM8K,EAASrH,IAAI1D,IAAIC,KACvBG,OAASxB,UAAUmM,GAAwCS,EAAO9H,IAAIxD,MAAME,OAA7C2K,EAASrH,IAAIxD,MAAME,QAEpDkB,cAAckK,EAAO9H,IAAIxD,OAEvBoN,EAAW1P,QAAQkP,IAAU9O,aAAa8O,GAChD,IAAIS,EAAgB,EAChBF,IAGFE,EADSD,EACO,EAEA,GAElBpN,EAAMD,MAAQsN,EAEd,MAAMR,EAAQ,CACZhN,MAAOG,EAAMD,KAAO6M,EAAMpJ,IAAIxD,MAAMD,KACpCE,QAASD,EAAME,OAAS0M,EAAMpJ,IAAIxD,MAAME,QAIpCoN,EAAa3N,QAAQiN,EAAMpJ,KAMjC,MAAO,CAAEqJ,MAAAA,EAAOC,OALD,CACbjN,MAAOyN,EAAWzN,OAASwN,EAAgB,GAC3CpN,QAASqN,EAAWrN,UAMxB,SAAS8M,aACPzB,EACAsB,EACAvN,GAEA,IAAKf,aAAasO,GAChB,MAAM,IAAIpL,kCAAmCoL,EAAepP,SAI9D,MAAMqN,EAAoB,MAATxL,EAAgBiM,EAAOrB,MAAM5K,EAAQ,GAAK6E,KAAKoH,EAAOrB,OACjEsD,EAAmB,MAATlO,GAAiBA,IAAUiM,EAAOrB,MAAMpJ,OAExDyK,EAAOrB,MAAMqC,OAAOjN,EAAO,EAAGuN,GAG9B,MAAMY,IAAkB3C,EAClB4C,GAAkBF,EAClBG,EAAaH,IAA2B,IAAhBX,EAAM9B,MAChC0C,IACF3C,EAAUC,OAAQ,GAEhB2C,IACFb,EAAM9B,OAAQ,GAKhB,MAAM6C,EAAevP,cAAckN,IAAWsC,QAAQtC,GAIhDtL,EAAQ6K,EACV,CACE9K,KAAM8K,EAASrH,IAAI1D,IAAIC,KACvBG,OAAQyN,EACHjP,UAAUmM,GAETS,EAAO9H,IAAIxD,MAAME,OADjB2K,EAASrH,IAAIxD,MAAME,OAErB2K,EAASrH,IAAI1D,IAAII,QAEvBkB,cAAckK,EAAO9H,IAAIxD,OAE7B,IAAIqN,EAAgB,EACpB,GAAIM,EACFN,EAAgB,MACX,CACL,MAAMQ,EAAa,EACbC,EAAe,EACrB9N,EAAME,QAAUsN,EAAgBK,EAAaC,EAE/C9N,EAAMD,MAAQsN,EAEd,MAAMR,EAAQ,CACZhN,MAAOG,EAAMD,KAAO6M,EAAMpJ,IAAIxD,MAAMD,KACpCE,QAASD,EAAME,OAAS0M,EAAMpJ,IAAIxD,MAAME,QAIpCoN,EAAa3N,QAAQiN,EAAMpJ,KAMjC,MAAO,CAAEqJ,MAAAA,EAAOC,OALD,CACbjN,MAAOyN,EAAWzN,OAASwN,EAAgB,GAC3CpN,QAASqN,EAAWrN,SAAWuN,GAAiBC,EAAiB,EAAI,IAAMC,EAAa,EAAI,KAMhG,SAAgBK,OAAOjC,EAAYR,EAAc/N,GAc/C,IAAKqB,SAAS0M,GACZ,MAAM,IAAI9J,kCAAkC8J,EAAO9N,oBAGrD,IAAI6B,EAAQiM,EAAOrB,MAAMoC,QAAQ9O,GACjC,GAAI8B,EAAQ,EAAG,CAIb,IAFAA,EAAQiM,EAAOrB,MAAMxJ,UAAUmJ,GAAQ/K,QAAQ+K,IAASA,EAAKA,OAASrM,IAE1D,EACV,MAAM,IAAIiE,MAAM,6CAGlBjE,EAAO+N,EAAOrB,MAAM5K,GAGtB,MAAMwL,EAAWS,EAAOrB,MAAM5K,EAAQ,GACtC,IAAIK,EAAO4L,EAAOrB,MAAM5K,EAAQ,GAGhCiM,EAAOrB,MAAMqC,OAAOjN,EAAO,GAC3B,IAAI2O,EAAerO,QAAQpC,EAAKiG,KAU5B9D,GAAQhB,UAAUgB,IAASA,EAAK8D,IAAIxD,MAAMD,OAASxC,EAAKiG,IAAI1D,IAAIC,OAElEiO,EAAerO,QAAQ,CAAEK,MAAOzC,EAAKiG,IAAIxD,MAAOF,IAAKJ,EAAK8D,IAAI1D,MAI9DJ,EAAO4L,EAAOrB,MAAM5K,EAAQ,GAG5BiM,EAAOrB,MAAMqC,OAAOjN,EAAO,IAI7B,MAAM4O,EAAYpD,GAAYvM,aAAauM,GACrCqD,EAAwBrD,GAAYA,EAASrH,IAAI1D,IAAIC,OAASxC,EAAKiG,IAAIxD,MAAMD,KAC7EoO,EAAmBzO,GAAQA,EAAK8D,IAAIxD,MAAMD,OAASxC,EAAKiG,IAAI1D,IAAIC,KAChEqO,EAAYH,IAAcC,GAAyBC,GAEnDrB,EAAS,CACbjN,QAASmO,EAAanO,OAASuO,EAAY,EAAI,IAC/CnO,SAAU+N,EAAa/N,SAIrBgO,GAAaC,IACfpB,EAAO7M,SAAW,GAEhBgO,GAAapD,IAAanL,IAC3BmL,EAA+CC,OAAQ,GAI1D,MAAMvE,EAASsE,GAAYS,EACrB+C,EAAiBxD,EAAWqB,QAAQJ,GAAQD,SAASC,GACrDwC,EAAepC,QAAQJ,GACvBmB,EAAkBoB,EAAerC,IAAIzF,GACvC0G,IACFH,EAAOjN,OAASoN,EAAgBpN,MAChCiN,EAAO7M,SAAWgN,EAAgBhN,SAEpC,MAAMsO,EAAiBD,EAAatC,IAAIzO,GACpCgR,IACFzB,EAAOjN,OAAS0O,EAAe1O,MAC/BiN,EAAO7M,SAAWsO,EAAetO,SAGnCoO,EAAetC,IAAIxF,EAAQuG,GAG7B,SAAgB0B,oBACd1C,EACAvO,EACAkR,GAA2B,GAG3B,IAAKA,EAAiB,OACtB,IAAKlR,EAAK0M,MAAMpJ,OAAQ,OAGxB6L,UAAU,CAAE7M,MAAO,EAAGI,QAAS,GAAK4L,SAASC,GAAOvO,GAGpD,MAAMmR,EAAYxK,KAAK3G,EAAK0M,OAC5ByC,UAAU,CAAE7M,MAAO,EAAGI,QAAS,GAAKiM,QAAQJ,GAAO4C,GAGrD,SAAgBC,mBACd7C,EACAvO,EACAqR,
GAA2B,GAG3B,IAAKA,EAAiB,OACtB,IAAKrR,EAAK0M,MAAMpJ,OAAQ,OAExB,MAAM6N,EAAYxK,KAAK3G,EAAK0M,OAC5ByE,EAAU5D,OAAQ,EAElB4B,UAAU,CAAE7M,MAAO,EAAGI,QAAS,GAAKiM,QAAQJ,GAAO4C,YAGrCG,YAAY/C,GAC1B,MAAML,EAAQI,SAASC,GACjBJ,EAAOQ,QAAQJ,GAEfgB,EAAkE,CACtEjN,MAAO,EACPI,QAAS,IAGX,SAAS6O,EAAWvR,GAClBA,EAAKiG,IAAIxD,MAAMD,MAAQ+M,EAAOjN,MAC9BtC,EAAKiG,IAAIxD,MAAME,QAAU4M,EAAO7M,QAAQ1C,EAAKiG,IAAIxD,MAAMD,OAAS,EAEhE,MAAMgP,EAAWtD,EAAMO,IAAIzO,GACvBwR,IACFjC,EAAOjN,OAASkP,EAASlP,MACzBiN,EAAO7M,QAAQ1C,EAAKiG,IAAIxD,MAAMD,OAC3B+M,EAAO7M,QAAQ1C,EAAKiG,IAAIxD,MAAMD,OAAS,GAAKgP,EAAS9O,SAG5D,SAAS+O,EAASzR,GAChBA,EAAKiG,IAAI1D,IAAIC,MAAQ+M,EAAOjN,MAC5BtC,EAAKiG,IAAI1D,IAAII,QAAU4M,EAAO7M,QAAQ1C,EAAKiG,IAAI1D,IAAIC,OAAS,EAE5D,MAAMkP,EAAUvD,EAAKM,IAAIzO,GACrB0R,IACFnC,EAAOjN,OAASoP,EAAQpP,MACxBiN,EAAO7M,QAAQ1C,EAAKiG,IAAI1D,IAAIC,OACzB+M,EAAO7M,QAAQ1C,EAAKiG,IAAI1D,IAAIC,OAAS,GAAKkP,EAAQhP,SAGzD,MAAMiP,EAAgB,CACpBzD,MAAOqD,EACPpD,KAAMsD,GAGR/D,SAASa,EAAM,CACb3M,CAAC/B,SAASK,UAAWyR,EACrB/P,CAAC/B,SAASO,OAAQuR,EAClB/P,CAAC/B,SAASW,YAAamR,EACvB/P,CAAC/B,SAASqB,aAAcyQ,EACxB/P,CAAC/B,SAASiB,aAAc6Q,EAExB/P,CAAC/B,SAASmB,YAAa2Q,EACvB/P,CAAC/B,SAASS,UAAWqR,EACrB/P,CAAC/B,SAASa,eAAgBiR,EAE1B/P,CAAC/B,SAASe,UAAW,CACnBgB,MAAM5B,GACJ,MAAM4R,EAAa5R,EAAKiG,IAAIxD,MAAMD,KAAO+M,EAAOjN,MAC1CuP,EAAa1D,EAAKM,IAAIzO,EAAK4H,KACjC5H,EAAK2M,SAAW4C,EAAO7M,QAAQkP,IAAe,IAAMC,EAAaA,EAAWnP,QAAU,GAEtF6O,EAAWvR,IAEbmO,KAAMsD,GAGR7P,CAAC/B,SAASyM,KAAMqF,EAChB/P,CAAC/B,SAAS2K,QAASmH,EACnB/P,CAAC/B,SAASuN,SAAUuE,EACpB/P,CAAC/B,SAASsN,OAAQwE,EAClB/P,CAAC/B,SAAS+M,SAAU+E,EACpB/P,CAAC/B,SAASmN,UAAW2E,EACrB/P,CAAC/B,SAASuB,SAAUuQ,IAGtBvD,cAAcuB,OAAOpB,GACrBG,aAAaiB,OAAOpB,GAGtB,SAAgBS,UACdhP,EACA8R,EACAC,EAAyC,IAEzC,MAAMC,gBAAEA,GAAkB,GAAUD,EAC9BH,EAAa5R,EAAKiG,IAAIxD,MAAMD,MAC5BF,MAAEA,EAAKI,QAAEA,GAAYoP,EACrBG,EAAQjS,IACPgS,GAAmBhS,EAAKiG,IAAIxD,MAAMD,OAASoP,IAC9C5R,EAAKiG,IAAIxD,MAAME,QAAUD,EACzB1C,EAAKiG,IAAI1D,IAAII,QAAUD,GAEzB1C,EAAKiG,IAAIxD,MAAMD,MAAQF,EACvBtC,EAAKiG,IAAI1D,IAAIC,MAAQF,GAwBvB,OArBAoL,SAAS1N,EAAM,CACb4B,CAAC/B,SAASO,OAAQ6R,EAClBrQ,CAAC/B,SAASS,UAAW2R,EACrBrQ,CAAC/B,SAASW,YAAayR,EACvBrQ,CAAC/B,SAASa,eAAgBuR,EAC1BrQ,CAAC/B,SAASe,UAAUZ,GAClBiS,EAAKjS,GACLA,EAAK2M,QAAUjK,GAEjBd,CAAC/B,SAASyM,KAAM2F,EAChBrQ,CAAC/B,SAAS2K,QAASyH,EACnBrQ,CAAC/B,SAASuN,SAAU6E,EACpBrQ,CAAC/B,SAASsN,OAAQ8E,EAClBrQ,CAAC/B,SAAS+M,SAAUqF,EACpBrQ,CAAC/B,SAASmN,UAAWiF,EACrBrQ,CAAC/B,SAASiB,aAAcmR,EACxBrQ,CAAC/B,SAASmB,YAAaiR,EACvBrQ,CAAC/B,SAASqB,aAAc+Q,EACxBrQ,CAAC/B,SAASuB,SAAU6Q,IAGfjS,EAGT,SAASqQ,QAAQvC,GACf,IAAKA,EAAMpB,MAAMpJ,OAAQ,OAAO,EAGhC,OADalB,QAAQ0L,EAAM7H,KACf3D,MAAQwL,EAAMpB,MAAMpJ,OAGlC,SAAS6L,UAAUI,EAAc2C,EAAkBlS,EAAYmS,GAC7D,MAAMzC,EAAkBwC,EAAQzD,IAAI0D,GAAQnS,GACxC0P,IACFH,EAAOjN,OAASoN,EAAgBpN,MAChCiN,EAAO7M,SAAWgN,EAAgBhN,SAGpCwP,EAAQ1D,IAAIxO,EAAMuP,YCxeJ6C,mBACd,MAAO,CACLnS,KAAMJ,SAASK,SACf+F,IAAK,CAAExD,MAAOsB,OAAQxB,IAAKwB,QAC3B2I,MAAO,aAIK2F,cAAczK,GAC5B,MAAM0K,EAAYC,iBAAiB3K,GAEnC,MAAO,CACL3H,KAAMJ,SAASO,MACf6F,IAAKnC,cAAcwO,EAAUrM,KAC7B2B,IAAK0K,EACL5F,MAAO,aAIK6F,iBAAiB3K,GAC/B,MAAM5B,EAAMwM,cAAc5K,GAE1B,MAAO,CACL3H,KAAMJ,SAASS,SACf2F,IAAK,CACHxD,MAAOsB,OACPxB,IAAK,CAAEC,KAAM,EAAGG,OAAQqD,EAAI1C,OAAS,IAEvC+I,KAAM,CACJpM,KAAMJ,SAASyM,IACfrG,IAAK,CACHxD,MAAO,CAAED,KAAM,EAAGG,OAAQ,GAC1BJ,IAAK,CAAEC,KAAM,EAAGG,OAAQqD,EAAI1C,OAAS,IAEvC7B,MAAOmG,EACP5B,IAAAA,aAKUyM,mBAAmB7K,GACjC,MAAM8K,EAAkBC,sBAAsB/K,GAE9C,MAAO,CACL3H,KAAMJ,SAASW,WACfyF,IAAKnC,cAAc4O,EAAgBzM,KACnC2B,IAAK8K,EACLhG,MAAO,aAIKiG,sBAAsB/K,GACpC,MAAM5B,EAAMwM,cAAc5K,GAE1B,MAAO,CACL3H,KAAMJ,SAASa,cACfuF,IAAK,CACHxD,MAAOsB,OACPxB,IAAK,CAAEC,KAAM,EAAGG,OAAQqD,EAAI1C,OAAS,IAEvC+I,KAA
M,CACJpM,KAAMJ,SAASyM,IACfrG,IAAK,CACHxD,MAAO,CAAED,KAAM,EAAGG,OAAQ,GAC1BJ,IAAK,CAAEC,KAAM,EAAGG,OAAQqD,EAAI1C,OAAS,IAEvC7B,MAAOmG,EACP5B,IAAAA,aAKU4M,iBAAiBhL,EAAenG,GAC9C,MAAMoR,EAAWC,YAAYlL,IACvBjF,OAAEA,GAAWkQ,EAAS5M,IAAI1D,IAE1BoK,EAAShK,EAAS,EAQxB,OANAqM,UACEvN,EACA,CAAEa,MAAO,EAAGI,QAASC,EAAS,EAAIlB,EAAMwE,IAAIxD,MAAME,QAClD,CAAEqP,iBAAiB,IAGd,CACL/R,KAAMJ,SAASe,SACfqF,IAAK,CACHxD,MAAOoB,cAAcgP,EAAS5M,IAAIxD,OAClCF,IAAKsB,cAAcpC,EAAMwE,IAAI1D,MAE/BqF,IAAKiL,EACLlG,OAAAA,EACAlL,MAAAA,GAIJ,MAAMsR,YAAc,iBACpB,SAASP,cAAc/Q,GACrB,OAAOA,EAAMkH,IAAIqK,GAASD,YAAY3N,KAAK4N,GAAQA,EAAOpK,KAAKC,UAAUmK,IAAQlK,KAAK,cAGxEgK,YAAYrR,GAC1B,MAAMuE,EAAMwM,cAAc/Q,GAE1B,MAAO,CACLxB,KAAMJ,SAASyM,IACfrG,IAAK,CAAExD,MAAOsB,OAAQxB,IAAK,CAAEC,KAAM,EAAGG,OAAQqD,EAAI1C,SAClD0C,IAAAA,EACAvE,MAAAA,YAIYwR,eAAexR,GAC7B,MAAMuE,EAAM4C,KAAKC,UAAUpH,GAE3B,MAAO,CACLxB,KAAMJ,SAAS2K,OACfvE,IAAK,CAAExD,MAAOsB,OAAQxB,IAAK,CAAEC,KAAM,EAAGG,OAAQqD,EAAI1C,SAClD0C,IAAAA,EACAvE,MAAAA,YAIYyR,gBAAgBzR,GAC9B,MAAMuE,EAAMvE,EAAM6F,WAElB,MAAO,CACLrH,KAAMJ,SAASuN,QACfnH,IAAK,CAAExD,MAAOsB,OAAQxB,IAAK,CAAEC,KAAM,EAAGG,OAAQqD,EAAI1C,SAClD0C,IAAAA,EACAvE,MAAAA,YAIY0R,cAAc1R,GAC5B,MAAMuE,EAAMvE,EAAM6F,WAElB,MAAO,CACLrH,KAAMJ,SAASsN,MACflH,IAAK,CAAExD,MAAOsB,OAAQxB,IAAK,CAAEC,KAAM,EAAGG,OAAQqD,EAAI1C,SAClD0C,IAAAA,EACAvE,MAAAA,YAIY2R,gBAAgB3R,GAC9B,MAAO,CACLxB,KAAMJ,SAAS+M,QACf3G,IAAK,CAAExD,MAAOsB,OAAQxB,IAAK,CAAEC,KAAM,EAAGG,OAAQlB,EAAQ,EAAI,IAC1DA,MAAAA,YAIY4R,iBAAiB5R,GAC/B,MAAMuE,EAAMvE,EAAM0G,cAElB,MAAO,CACLlI,KAAMJ,SAASmN,SACf/G,IAAK,CAAExD,MAAOsB,OAAQxB,IAAK,CAAEC,KAAM,EAAGG,OAAQqD,EAAI1C,SAClD0C,IAAAA,EACAvE,MAAAA,GAIJ,SAAgB6R,sBACd,MAAO,CACLrT,KAAMJ,SAASiB,YACfmF,IAAK,CAAExD,MAAOsB,OAAQxB,IAAK,CAAEC,KAAM,EAAGG,OAAQ,IAC9C+J,MAAO,aAIK6G,mBAAmBlH,GACjC,MAAO,CACLpM,KAAMJ,SAASmB,WACfiF,IAAKnC,cAAcuI,EAAKpG,KACxBoG,KAAAA,EACAkB,OAAO,GAIX,SAAgBiG,sBACd,MAAO,CACLvT,KAAMJ,SAASqB,YACf+E,IAAK,CAAExD,MAAOsB,OAAQxB,IAAK,CAAEC,KAAM,EAAGG,OAAQ,IAC9C+J,MAAO,aC1LK+G,eAAeC,GA0B7B,OAzB0BA,EAAShH,MAAMiH,OAAOtH,IAC9C,IAAK1L,WAAW0L,GAAO,OAAO,EAE9B,MAAMuH,EAAkB3S,cAAcoL,EAAK5K,OACrCoS,EACJhT,cAAcwL,EAAK5K,QACnB4K,EAAK5K,MAAMiL,MAAMpJ,QACjBrC,cAAcoL,EAAK5K,MAAMiL,MAAM,GAAGL,MAEpC,OAAOuH,GAAmBC,IAGVC,QAAQ9T,IACxBwQ,OAAOkD,EAAUA,EAAU1T,GAEvBiB,cAAcjB,EAAKyB,OACrB2N,OAAOsE,EAAUA,EAAUK,YAAY/T,IAEvCgU,iBAAiBhU,GAAM8T,QAAQG,IAC7B7E,OAAOsE,EAAUA,EAAUO,OAKjC3C,YAAYoC,GACLA,EAGT,SAASK,YAAYG,GACnB,MAAMzI,EAAQ4G,cAAc6B,EAAUtM,IAAInG,OAE1C,IAAK,MAAM4K,KAAS6H,EAAUzS,MAAsBiL,MAClD0C,OAAO3D,EAAOA,EAAOY,EAAKA,MAI5B,OADAiF,YAAY7F,GACLA,EAGT,SAASuI,iBAAiBE,GACxB,MAAM3F,EAAO6D,mBAEb,IAAK,MAAM+B,KAAsBD,EAAUzS,MAAsBiL,MAAO,CACtE,MAAMuH,EAAcxB,mBAAmByB,EAAUtM,IAAInG,OACrD2N,OAAOb,EAAMA,EAAM0F,GAEnB,IAAK,MAAMG,KAAsBD,EAAkB9H,KAAqBK,MACtE0C,OAAOb,EAAM0F,EAAaG,EAAkB/H,MAKhD,OADAiF,YAAY/C,GACLA,EAAK7B,eAGE2H,iBAAiBX,EAAoBY,GAEnD,OAAOZ,WAGOa,iBAAiBb,GAC/B,IAAIpE,EAAQ,EACRhC,EAAW,EACf,IAAK,MAAMjB,KAAQqH,EAAShH,MACT,IAAbY,GAAkBjB,EAAKpG,IAAIxD,MAAMD,KAAO,EAE1C8M,EAAQ,EAAIjD,EAAKpG,IAAIxD,MAAMD,KAClB6J,EAAKpG,IAAIxD,MAAMD,KAAO8M,EAAQhC,EAAW,IAClDgC,GAAShC,EAAW,GAAKjB,EAAKpG,IAAIxD,MAAMD,KAAO8M,IAGjDN,UAAU3C,EAAM,CACd/J,MAAOgN,EACP5M,QAAS,IAEX4K,EAAWjB,EAAKpG,IAAI1D,IAAIC,KAG1B,OAAOkR,ECpFT,MAAMc,eAAiB,CACrBC,WAAY,GACZC,eAAe,EACfC,gBAAgB,GAGlB,SAAwBC,QAAQnT,EAAY6S,EAAiB,IAC3DA,EAASxN,OAAO+N,OAAO,GAAIL,eAAgBF,GAC3C7S,EAAQqT,OAAOrT,GAEf,MAAMiS,EAAWtB,mBACjB,IAAK,MAAM/F,KAAQ0I,WAAWtT,EAAO6S,GACnClF,OAAOsE,EAAUA,EAAUrH,GAc7B,OAZAiF,YAAYoC,GAKMtL,KAChBsL,EACAD,eACAC,GAAYW,iBAAiBX,GAC7Ba,kBAMJ,SAAUQ,WAAWpN,EAAa2M,GAChC,IAAK,MAAM1M,KAAOd,OAAO2B,KAAKd,SACtBiL,iBAAiB,CAAChL,GAAM+D,YAAUhE,EAAOC,
GAAM0M,IAIzD,SAAS3I,YAAUlK,EAAY6S,GAC7B,GAAa,MAAT7S,EACF,MAAM,IAAIwC,MAAM,mDAGlB,OAAI+C,SAASvF,GACJwR,eAAexR,GACbwF,UAAUxF,GACZyR,gBAAgBzR,GACdyF,QAAQzF,GACV0R,cAAc1R,GACZ0F,UAAU1F,GACZ2R,gBAAgB3R,GACd2F,OAAO3F,GACT4R,iBAAiB5R,GACfuB,MAAMC,QAAQxB,GAChBuT,gBAAgBvT,EAAO6S,GAEvBW,gBAAgBxT,EAAO6S,GAIlC,SAASU,gBAAgBvT,EAAmB6S,GAC1C,MAAMrI,EAAeqH,sBACrB,IAAK,MAAM4B,KAAWzT,EAAO,CAI3B2N,OAAOnD,EAAcA,EAFKsH,mBADb5H,YAAUuJ,EAASZ,KASlC,OAJArD,oBAAoBhF,EAAcA,EAAcqI,EAAOK,gBACvDvD,mBAAmBnF,EAAcA,EAAcqI,EAAOI,eACtDpD,YAAYrF,GAELA,EAGT,SAASgJ,gBAAgBxT,EAAe6S,GAEtC,IAAK9M,SADL/F,EAAQqT,OAAOrT,IACO,OAAOkK,YAAUlK,EAAO6S,GAE9C,MAAMa,EAAe3B,sBACf9G,EAAQ,IAAIqI,WAAWtT,EAAO6S,IACpC,IAAK,MAAMjI,KAAQK,EAAO,CAGxB0C,OAAO+F,EAAcA,EAFK5B,mBAAmBlH,IAQ/C,OAJA4E,oBAAoBkE,EAAcA,EAAcb,EAAOK,gBACvDvD,mBAAmB+D,EAAcA,EAAcb,EAAOI,eACtDpD,YAAY6D,GAELA,EAGT,SAASL,OAAOrT,GACd,OAAOA,IAAU2F,OAAO3F,IAAkC,mBAAjBA,EAAMqT,OAAwBrT,EAAMqT,SAAWrT,ECvG1F,MAAM+B,YAAc,aAEpB,SAAwB4R,OAAOzH,EAAU0H,EAAkB,MACzD,MAAM/S,EAAkB,GAqExB,OAnEAoL,SAASC,EAAK,CACZ/L,CAAC/B,SAASS,UAAUN,GAClB,MAAMyC,MAAEA,EAAKF,IAAEA,GAAQvC,EAAKiG,IAE5BqP,MAAMhT,EAAO,CAAEG,MAAAA,EAAOF,IAAK,CAAEC,KAAMC,EAAMD,KAAMG,OAAQF,EAAME,OAAS,IAAO,KAC7E2S,MAAMhT,EAAO,CAAEG,MAAO,CAAED,KAAMD,EAAIC,KAAMG,OAAQJ,EAAII,OAAS,GAAKJ,IAAAA,GAAO,MAE3EX,CAAC/B,SAASa,eAAeV,GACvB,MAAMyC,MAAEA,EAAKF,IAAEA,GAAQvC,EAAKiG,IAE5BqP,MAAMhT,EAAO,CAAEG,MAAAA,EAAOF,IAAK,CAAEC,KAAMC,EAAMD,KAAMG,OAAQF,EAAME,OAAS,IAAO,MAC7E2S,MAAMhT,EAAO,CAAEG,MAAO,CAAED,KAAMD,EAAIC,KAAMG,OAAQJ,EAAII,OAAS,GAAKJ,IAAAA,GAAO,OAG3EX,CAAC/B,SAASe,UAAUZ,GAClB,MACEyC,OAAOD,KAAEA,IACPxC,EAAKiG,IACTqP,MACEhT,EACA,CAAEG,MAAO,CAAED,KAAAA,EAAMG,OAAQ3C,EAAK2M,QAAUpK,IAAK,CAAEC,KAAAA,EAAMG,OAAQ3C,EAAK2M,OAAS,IAC3E,MAGJ/K,CAAC/B,SAASyM,KAAKtM,GACbsV,MAAMhT,EAAOtC,EAAKiG,IAAKjG,EAAKgG,MAG9BpE,CAAC/B,SAAS2K,QAAQxK,GAChBsV,MAAMhT,EAAOtC,EAAKiG,IAAKjG,EAAKgG,MAE9BpE,CAAC/B,SAASuN,SAASpN,GACjBsV,MAAMhT,EAAOtC,EAAKiG,IAAKjG,EAAKgG,MAE9BpE,CAAC/B,SAASsN,OAAOnN,GACfsV,MAAMhT,EAAOtC,EAAKiG,IAAKjG,EAAKgG,MAE9BpE,CAAC/B,SAAS+M,SAAS5M,GACjBsV,MAAMhT,EAAOtC,EAAKiG,IAAKjG,EAAKyB,MAAM6F,aAEpC1F,CAAC/B,SAASmN,UAAUhN,GAClBsV,MAAMhT,EAAOtC,EAAKiG,IAAKjG,EAAKgG,MAG9BpE,CAAC/B,SAASiB,aAAad,GACrB,MAAMyC,MAAEA,EAAKF,IAAEA,GAAQvC,EAAKiG,IAC5BqP,MAAMhT,EAAO,CAAEG,MAAAA,EAAOF,IAAK,CAAEC,KAAMC,EAAMD,KAAMG,OAAQF,EAAME,OAAS,IAAO,KAC7E2S,MAAMhT,EAAO,CAAEG,MAAO,CAAED,KAAMD,EAAIC,KAAMG,OAAQJ,EAAII,OAAS,GAAKJ,IAAAA,GAAO,MAG3EX,CAAC/B,SAASqB,aAAalB,GACrB,MAAMyC,MAAEA,EAAKF,IAAEA,GAAQvC,EAAKiG,IAC5BqP,MAAMhT,EAAO,CAAEG,MAAAA,EAAOF,IAAK,CAAEC,KAAMC,EAAMD,KAAMG,OAAQF,EAAME,OAAS,IAAO,KAC7E2S,MAAMhT,EAAO,CAAEG,MAAO,CAAED,KAAMD,EAAIC,KAAMG,OAAQJ,EAAII,OAAS,GAAKJ,IAAAA,GAAO,MAE3EX,CAAC/B,SAASmB,YAAYhB,GACpB,IAAKA,EAAKuN,MAAO,OAEjB,MAAM9K,EAAQzC,EAAKiG,IAAI1D,IACvB+S,MAAMhT,EAAO,CAAEG,MAAAA,EAAOF,IAAK,CAAEC,KAAMC,EAAMD,KAAMG,OAAQF,EAAME,OAAS,IAAO,MAG/Ef,CAAC/B,SAASuB,SAASpB,GACjBsV,MAAMhT,EAAOtC,EAAKiG,IAAKjG,EAAKgG,QAIzB1D,EAAMwG,KAAKuM,GAAWA,EAG/B,SAASC,MAAMhT,EAAiB2D,EAAeD,GAC7C,MAAMuP,EAAYvP,EAAI+G,MAAMvJ,aACtBgS,EAAiBvP,EAAI1D,IAAIC,KAAOyD,EAAIxD,MAAMD,KAAO,EAEvD,GAAI+S,EAAUjS,SAAWkS,EACvB,MAAM,IAAIvR,4DAC8CuR,gBAA6BxP,MAIvF,IAAK,IAAIiC,EAAIhC,EAAIxD,MAAMD,KAAMyF,GAAKhC,EAAI1D,IAAIC,KAAMyF,IAAK,CACnD,MAAMzF,EAAOY,UAAQd,EAAO2F,GACtBwN,EAAgBxN,IAAMhC,EAAIxD,MAAMD,KAChCkT,EAAczN,IAAMhC,EAAI1D,IAAIC,KAE5BgK,EAASiJ,EACXjT,EAAKe,OAAO,EAAG0C,EAAIxD,MAAME,QAAQgT,OAAO1P,EAAIxD,MAAME,OAAQmC,OAC1D,GACE2H,EAAQiJ,EAAclT,EAAKe,OAAO0C,EAAI1D,IAAII,QAAU,GAE1DL,EAAM2F,EAAI,GAAKuE,EAAS+I,EAAUtN,EAAIhC,EAAIxD,MAAMD,MAAQiK,GAI5D,SAASrJ,UAAQd,EAAiBR,GAChC,IAAKQ,EAAMR,EAAQ,GACjB,IAAK,IAAImG,EAAI,EAAGA
,EAAInG,EAAOmG,IACpB3F,EAAM2F,KAAI3F,EAAM2F,GAAK,IAI9B,OAAO3F,EAAMR,EAAQ,YC1GC8T,KAAKjI,EAAU9K,EAAgB,IACrD,MAAMX,EAAS2E,QACTgP,EAAsB,IAAIC,IAC1BC,EAA4B,IAAID,IAChCE,EAAuB,IAAIF,IACjC,IACIG,EADAC,EAAchU,EAEdiU,GAAO,EA0EX,OAxEAzI,SAASC,EAAK,CACZ/L,CAAC/B,SAASO,OAAOJ,GACf,MAAM4H,EAAM5H,EAAK4H,IAAIyE,KAAK5K,MAC1B,IACE2U,YAAYlU,EAAQ0F,EAAK5H,EAAKC,KAAM,CAAE4V,OAAAA,EAAQE,aAAAA,EAAcC,QAAAA,IAC5D,MAAOK,GACP,MAAM,IAAIrS,WAAWnB,EAAO7C,EAAK4H,IAAI3B,IAAIxD,MAAO4T,EAAInS,SAGtD,MAAMoS,EAAaC,QAAQ3O,GAC3BiO,EAAOW,IAAIF,GACXN,EAAQQ,IAAIF,GAEZJ,EAASO,YAAYvU,EAAQ0F,IAG/BhG,CAAC/B,SAASW,YAAYR,GACpB,MAAM4H,EAAM5H,EAAK4H,IAAIyE,KAAK5K,MAE1B,IACE2U,YAAYlU,EAAQ0F,EAAK5H,EAAKC,KAAM,CAAE4V,OAAAA,EAAQE,aAAAA,EAAcC,QAAAA,IAC5D,MAAOK,GACP,MAAM,IAAIrS,WAAWnB,EAAO7C,EAAK4H,IAAI3B,IAAIxD,MAAO4T,EAAInS,SAGtD,MAAMoS,EAAaC,QAAQ3O,GAC3BmO,EAAaS,IAAIF,GACjBN,EAAQQ,IAAIF,GAEZJ,EAASQ,iBAAiBxU,EAAQ0F,IAGpChG,CAAC/B,SAASe,UAAW,CACnBgB,MAAM5B,GACJ,GAAImW,EAAM,OAEV,MAAMvO,EAAM5H,EAAK4H,IAAInG,MACrB,IACE2U,YAAYF,EAAQtO,EAAK5H,EAAKC,KAAM,CAAE4V,OAAAA,EAAQE,aAAAA,EAAcC,QAAAA,IAC5D,MAAOK,GACP,MAAM,IAAIrS,WAAWnB,EAAO7C,EAAK4H,IAAI3B,IAAIxD,MAAO4T,EAAInS,SAGtD,MAAMzC,EAAQkV,QAAQ3W,EAAKyB,QACZmG,EAAItE,OAAS,EAAImT,YAAYP,EAAQtO,EAAIgP,MAAM,GAAI,IAAMV,GAEjEvP,KAAKiB,IAASnG,EACrBuU,EAAQQ,IAAID,QAAQ3O,IAEhB3G,cAAcjB,EAAKyB,SACrBwU,EAAkBC,EAClBA,EAASzU,IAGbG,KAAK5B,GACCiB,cAAcjB,EAAKyB,SACrByU,EAASD,KAKfrU,CAAC/B,SAASqB,aAAc,CACtBU,QAEEuU,GAAO,GAETvU,OACEuU,GAAO,MAKNjU,WAGOyU,QAAQ3W,GACtB,OAAQA,EAAKC,MACX,KAAKJ,SAASqB,YACZ,MAAMgB,EAAS2E,QAUf,OARA7G,EAAK0M,MAAMoH,QAAQ,EAAGzH,KAAAA,MACpB,MAAMzE,EAAMyE,EAAKzE,IAAInG,MACfA,EAAQkV,QAAQtK,EAAK5K,QAEZmG,EAAItE,OAAS,EAAImT,YAAYvU,EAAQ0F,EAAIgP,MAAM,GAAI,IAAM1U,GACjEyE,KAAKiB,IAASnG,IAGhBS,EAET,KAAKrC,SAASiB,YACZ,OAAOd,EAAK0M,MAAM/D,IAAI0D,GAAQsK,QAAQtK,EAAKA,OAE7C,KAAKxM,SAAS2K,OACd,KAAK3K,SAASuN,QACd,KAAKvN,SAASsN,MACd,KAAKtN,SAAS+M,QACd,KAAK/M,SAASmN,SACZ,OAAOhN,EAAKyB,MAEd,QACE,MAAM,IAAIwC,kCAAmCjE,EAAcC,UAIjE,SAASmW,YACPzO,EACAC,EACA3H,EACA4W,GAGA,IAAIC,EAAkB,GAClBhV,EAAQ,EACZ,IAAK,MAAMkR,KAAQpL,EAAK,CAGtB,GAFAkP,EAAMlT,KAAKoP,IAENtL,IAAIC,EAAQqL,GAAO,OACxB,GAAI+D,YAAYpP,EAAOqL,IACrB,MAAM,IAAI/O,2DAA2D6S,EAAMhO,KAAK,QAGlF,MAAMkO,EAAeT,QAAQO,GAC7B,GAAI9T,MAAMC,QAAQ0E,EAAOqL,MAAW6D,EAAMd,aAAarO,IAAIsP,GACzD,MAAM,IAAI/S,sDAAsD+S,KAGlE,MAAMC,EAAenV,IAAU8F,EAAItE,OAAS,EAC5CqE,EAAS3E,MAAMC,QAAQ0E,EAAOqL,KAAUiE,EAAetQ,KAAKgB,EAAOqL,IAASrL,EAAOqL,GAGrF,MAAMsD,EAAaC,QAAQ3O,GAG3B,GAAID,GAAU1H,IAASJ,SAASO,OAASyW,EAAMb,QAAQtO,IAAI4O,GACzD,MAAM,IAAIrS,6DAA6DqS,KAIzE,GAAI3O,GAAU1H,IAASJ,SAASW,aAAeqW,EAAMd,aAAarO,IAAI4O,GACpE,MAAM,IAAIrS,kEAAkEqS,KAIhF,SAASG,YAAY9O,EAAaC,GAChC,MAAMoB,EAASkO,OAAOvP,EAAQC,EAAIgP,MAAM,GAAI,IACtCO,EAAWxQ,KAAKiB,GAKtB,OAJKoB,EAAOmO,KACVnO,EAAOmO,GAAYtQ,SAGdmC,EAAOmO,GAGhB,SAAST,iBAAiB/O,EAAaC,GACrC,MAAMoB,EAASkO,OAAOvP,EAAQC,EAAIgP,MAAM,GAAI,IACtCO,EAAWxQ,KAAKiB,GACjBoB,EAAOmO,KACVnO,EAAOmO,GAAY,IAGrB,MAAMhV,EAAO0E,QAGb,OAFAmC,EAAOrC,KAAKiB,IAAOhE,KAAKzB,GAEjBA,EAGT,SAAS+U,OAAOvP,EAAac,GAC3B,OAAOA,EAAKH,OAAO,CAAC4N,EAAQkB,KACrBlB,EAAOkB,KACVlB,EAAOkB,GAAUvQ,SAEZ7D,MAAMC,QAAQiT,EAAOkB,IAAWzQ,KAAKuP,EAAOkB,IAAWlB,EAAOkB,IACpEzP,GAGL,SAASoP,YAAYtV,GACnB,MAAwB,iBAAVA,IAAuB2F,OAAO3F,GAG9C,SAAS8U,QAAQ3O,GACf,OAAOA,EAAIkB,KAAK,KChMlB,IAAYuO,oBAYIC,MAAMC,GACpB,OAAOA,EAAOtX,OAASoX,WAAWG,aAOpBC,OAAOF,GACrB,OAAOA,EAAOtX,OAASoX,WAAWK,cAOpBC,SAASJ,GACvB,OAAOA,EAAOtX,OAASoX,WAAWO,gBASpBC,OAAON,GACrB,OAAOA,EAAOtX,OAASoX,WAAWS,cASpBC,SAASR,GACvB,OAAOA,EAAOtX,OAASoX,WAAWW,OAKpC,SAAwBC,KAAKzL,EAAaC,EAAYyL,EAAa,IACjE,OAAI1L,IAAWC,GAASvE,WAAWsE,EAAQC,GAClC,GAGLzJ,MAAMC,QAAQuJ,IAAWxJ,MAAMC,QAAQwJ,GAClC0
L,cAAc3L,EAAQC,EAAOyL,GAC3B1Q,SAASgF,IAAWhF,SAASiF,GAC/B2L,eAAe5L,EAAQC,EAAOyL,GAE9B,CACL,CACEjY,KAAMoX,WAAWK,KACjBQ,KAAAA,IAMR,SAASE,eAAe5L,EAAaC,EAAYyL,EAAa,IAC5D,IAAIG,EAAoB,GAGxB,MAAMC,EAAcxR,OAAO2B,KAAK+D,GAC1B+L,EAAgBD,EAAY3P,IAAIf,GAAOY,gBAAgBgE,EAAO5E,KAC9D4Q,EAAa1R,OAAO2B,KAAKgE,GACzBgM,EAAeD,EAAW7P,IAAIf,GAAOY,gBAAgBiE,EAAM7E,KAI3DmQ,EAAW,CAACW,EAAgBC,KAEhC,GADcA,EAAO7J,QAAQ4J,GACjB,EAAG,OAAO,EAEtB,MAAME,EAAaN,EAAYC,EAAczJ,QAAQ4J,IACrD,OAAQF,EAAWK,SAASD,IAkC9B,OA9BAN,EAAYxE,QAAQ,CAAClM,EAAK9F,KACxB,MAAMgX,EAAWZ,EAAKa,OAAOnR,GAC7B,GAAI4Q,EAAWK,SAASjR,GACtBmB,MAAMsP,EAASJ,KAAKzL,EAAO5E,GAAM6E,EAAM7E,GAAMkR,SACxC,GAAIf,EAASQ,EAAczW,GAAQ2W,GAAe,CACvD,MAAMO,EAAKR,EAAWC,EAAa3J,QAAQyJ,EAAczW,KACzDuW,EAAQzU,KAAK,CACX3D,KAAMoX,WAAWW,OACjBE,KAAAA,EACA/F,KAAMvK,EACNoR,GAAAA,SAGFX,EAAQzU,KAAK,CACX3D,KAAMoX,WAAWO,OACjBM,KAAMY,MAMZN,EAAW1E,QAAQ,CAAClM,EAAK9F,KAClBwW,EAAYO,SAASjR,IAASmQ,EAASU,EAAa3W,GAAQyW,IAC/DF,EAAQzU,KAAK,CACX3D,KAAMoX,WAAWG,IACjBU,KAAMA,EAAKa,OAAOnR,OAKjByQ,EAGT,SAASF,cAAc3L,EAAeC,EAAcyL,EAAa,IAC/D,IAAIG,EAAoB,GAGxB,MAAME,EAAgB/L,EAAO7D,IAAIH,iBAC3BiQ,EAAehM,EAAM9D,IAAIH,iBAG/BiQ,EAAa3E,QAAQ,CAACrS,EAAOK,KAC3B,MAAMmX,EAAWnX,GAASyW,EAAcjV,OAGxC,IAAK2V,GAAYV,EAAczW,KAAWL,EACxC,OAIF,MAAM0Q,EAAOoG,EAAczJ,QAAQrN,EAAOK,EAAQ,GAClD,IAAKmX,GAAY9G,GAAQ,EAAG,CAC1BkG,EAAQzU,KAAK,CACX3D,KAAMoX,WAAWS,KACjBI,KAAAA,EACA/F,KAAAA,EACA6G,GAAIlX,IAGN,MAAMmQ,EAAOsG,EAAcxJ,OAAOoD,EAAM,GAGxC,YAFAoG,EAAcxJ,OAAOjN,EAAO,KAAMmQ,GAMpC,MAAMiH,GAAWT,EAAaI,SAASN,EAAczW,IACrD,IAAKmX,GAAYC,EAIf,OAHAnQ,MAAMsP,EAASJ,KAAKzL,EAAO1K,GAAQ2K,EAAM3K,GAAQoW,EAAKa,OAAOjX,UAC7DyW,EAAczW,GAASL,GAMzB4W,EAAQzU,KAAK,CACX3D,KAAMoX,WAAWG,IACjBU,KAAMA,EAAKa,OAAOjX,KAEpByW,EAAcxJ,OAAOjN,EAAO,EAAGL,KAIjC,IAAK,IAAIwG,EAAIwQ,EAAanV,OAAQ2E,EAAIsQ,EAAcjV,OAAQ2E,IAC1DoQ,EAAQzU,KAAK,CACX3D,KAAMoX,WAAWO,OACjBM,KAAMA,EAAKa,OAAO9Q,KAItB,OAAOoQ,WCrLec,WAAWnZ,EAAYkY,GAC7C,IAAKA,EAAK5U,OAAQ,OAAOtD,EAEzB,GAAIW,WAAWX,GACb,OAAOmZ,WAAWnZ,EAAKyB,MAAOyW,GAGhC,MAAMzU,EAAqC,GAC3C,IAAI2V,EAqCJ,GApCI/X,SAASrB,IACXA,EAAK0M,MAAM2M,KAAK,CAAChN,EAAMvK,KACrB,IACE,IAAI8F,EAAY,GAChB,GAAIjH,WAAW0L,GACbzE,EAAMyE,EAAKzE,IAAInG,WACV,GAAItB,QAAQkM,GACjBzE,EAAMyE,EAAKzE,IAAIyE,KAAK5K,WACf,GAAIlB,aAAa8L,GAAO,CAG7B,MAAMiN,EAAa9Q,gBAFnBZ,EAAMyE,EAAKzE,IAAIyE,KAAK5K,OAGfgC,EAAQ6V,KACX7V,EAAQ6V,GAAc,GAExB,MAAMC,EAAc9V,EAAQ6V,KAE5B1R,EAAMA,EAAImR,OAAOQ,QACRxY,aAAasL,IAAS1L,WAAW0L,EAAKA,MAC/CzE,EAAMyE,EAAKA,KAAKzE,IAAInG,MACXV,aAAasL,KACtBzE,EAAM,CAAC9F,IAGT,SAAI8F,EAAItE,SAAUwE,YAAYF,EAAKsQ,EAAKtB,MAAM,EAAGhP,EAAItE,YACnD8V,EAAQD,WAAW9M,EAAM6L,EAAKtB,MAAMhP,EAAItE,UACjC,GAIT,MAAO+S,GACP,OAAO,MAKR+C,EACH,MAAM,IAAInV,qCAAqCiU,EAAKpP,KAAK,QAG3D,OAAOsQ,WAGOI,cAAcxZ,EAAYkY,GACxC,IACE,OAAOiB,WAAWnZ,EAAMkY,GACxB,MAAO7B,cAGKoD,WAAWzZ,EAAYkY,GACrC,IACInK,EADA2L,EAAcxB,EAElB,KAAOwB,EAAYpW,SAAWyK,GAE5BA,EAASyL,cAAcxZ,EADvB0Z,EAAcA,EAAY9C,MAAM,GAAI,IAItC,IAAK7I,EACH,MAAM,IAAI9J,6CAA6CiU,EAAKpP,KAAK,QAGnE,OAAOiF,WClDe4L,MAAM/K,EAAkBgL,EAActF,GAC5D,MACM5H,EAAQ,IADOpB,UAAUsD,IAGzBiL,EAAcjE,KAAKlJ,GAYzB,OAAO0I,OAFkB0E,aATW,CAClC7Z,KAAMJ,SAASK,SACf+F,IAAK,CAAExD,MAAO,CAAED,KAAM,EAAGG,OAAQ,GAAKJ,IAAK,CAAEC,KAAM,EAAGG,OAAQ,IAC9D+J,MAAAA,GAGuBkI,QAAQgF,EAAStF,GAC1B2D,KAAK4B,EAAaD,IAIHlN,OAGjC,SAASoN,aAAaC,EAAoBH,EAAmBvB,GAoG3D,OAzFAA,EAAQvE,QAAQyD,IACd,GAAID,MAAMC,GAAS,CACjB,MAAMlI,EAAQ8J,WAAWS,EAASrC,EAAOW,MACnCwB,EAAcnC,EAAOW,KAAKtB,MAAM,GAAI,GAC1C,IAUI7I,EAVAjM,EAAQ6E,KAAK4Q,EAAOW,MAEpB8B,EAAiBzZ,aAAa8O,GAClC,GAAIpI,UAAUnF,KAAW4X,EAAYL,KAAKpS,WAAY,CACpD,MAAMgT,EAAUT,cAAcO,EAAUL,EAAYX,OAAO,IACvDkB,GAAW1Z,aAAa0Z,KAC1BD,GAAiB,GAKrB,GAAI7Z,QAAQkP,GACVtB,EAASgM,OACJ,GAAIC,EAAgB,CACzBjM,EA
ASgM,EAIT,MAAMrG,EAAWqG,EACXvN,EAASgN,cAAc9F,EAAUgG,EAAYX,OAAOjX,EAAQ,IAC5D2K,EAAQ+M,cAAc9F,EAAUgG,EAAYX,OAAOjX,IAEvDA,EADE2K,EACMiH,EAAShH,MAAMoC,QAAQrC,GACtBD,EACDkH,EAAShH,MAAMoC,QAAQtC,GAAU,EAEjCkH,EAAShH,MAAMpJ,YAIrB3C,WADJoN,EAAS0L,WAAWM,EAAUxC,EAAOW,SACbnK,EAASA,EAAOtM,OAGtClB,aAAawN,IAAWlN,cAAckN,IAAWhO,WAAWgO,GAC9DqB,OAAO2K,EAAUhM,EAAQsB,EAAOvN,GAEhCsN,OAAO2K,EAAUhM,EAAQsB,QAEtB,GAAIoI,OAAOF,GAAS,CACzB,IAEIxJ,EAFAa,EAAWuK,WAAWY,EAAUxC,EAAOW,MACvCrJ,EAAcsK,WAAWS,EAASrC,EAAOW,MAGzCvX,WAAWiO,IAAajO,WAAWkO,IAErCd,EAASa,EACTA,EAAWA,EAASnN,MACpBoN,EAAcA,EAAYpN,OAE1BsM,EAAS0L,WAAWM,EAAUxC,EAAOW,MAGvC9N,QAAQ2P,EAAUhM,EAAQa,EAAUC,QAC/B,GAAI8I,SAASJ,GAAS,CAC3B,IAAIxJ,EAAS0L,WAAWM,EAAUxC,EAAOW,MACrCvX,WAAWoN,KAASA,EAASA,EAAOtM,OAExC,MAAMzB,EAAOmZ,WAAWY,EAAUxC,EAAOW,MAEzC1H,OAAOuJ,EAAUhM,EAAQ/N,QACpB,GAAI6X,OAAON,GAAS,CACzB,IAAIxJ,EAASoL,WAAWY,EAAUxC,EAAOW,MACrC5W,QAAQyM,KAASA,EAASA,EAAO1B,MACjC1L,WAAWoN,KAASA,EAASA,EAAOtM,OAExC,MAAMzB,EAAQ+N,EAAqBrB,MAAM6K,EAAOpF,MAEhD3B,OAAOuJ,EAAUhM,EAAQ/N,GACzBoP,OAAO2K,EAAUhM,EAAQ/N,EAAMuX,EAAOyB,SACjC,GAAIjB,SAASR,GAAS,CAC3B,IAAIxJ,EAASoL,WAAWY,EAAUxC,EAAOW,KAAKa,OAAOxB,EAAOpF,OAGxDtD,EAAcsK,WAAWS,EAASrC,EAAOW,KAAKa,OAAOxB,EAAOyB,KAI5D1X,QAAQyM,KAASA,EAASA,EAAO1B,MACjC/K,QAAQuN,KAAcA,EAAcA,EAAYxC,MAEpDjC,QAAQ2P,EAAUhM,EAAQA,EAAOnG,IAAKiH,EAAYjH,QAItD0J,YAAYyI,GACLA,WC1IOrP,MAAMjJ,GACpB,OAAOmU,KAAKtK,UAAU7J,GAAQA,YAGhBoH,UAAUpH,EAAY6S,GAEpC,OAAOc,OADUR,QAAQnT,EAAO6S,GACT5H,QHTzB,SAAY2K,GACVA,YACAA,cACAA,kBACAA,cACAA,kBALF,CAAYA,aAAAA"} \ No newline at end of file diff --git a/node_modules/toml-patch/dist/toml-patch.d.ts b/node_modules/toml-patch/dist/toml-patch.d.ts new file mode 100644 index 0000000..2aaf3d2 --- /dev/null +++ b/node_modules/toml-patch/dist/toml-patch.d.ts @@ -0,0 +1,18 @@ +//! toml-patch v0.2.3 - https://github.com/timhall/toml-patch - @license: MIT +// FILE GENERATED BY `rollup-plugin-dts@0.14.0` +// https://github.com/Swatinem/rollup-plugin-dts + +interface Format { + printWidth?: number; + tabWidth?: number; + useTabs?: boolean; + trailingComma?: boolean; + bracketSpacing?: boolean; +} + +declare function patch(existing: string, updated: any, format?: Format): string; + +declare function parse(value: string): any; +declare function stringify(value: any, format?: Format): string; + +export { parse, patch, stringify }; diff --git a/node_modules/toml-patch/dist/toml-patch.es.js b/node_modules/toml-patch/dist/toml-patch.es.js new file mode 100644 index 0000000..5c0eef6 --- /dev/null +++ b/node_modules/toml-patch/dist/toml-patch.es.js @@ -0,0 +1,2255 @@ +//! 
toml-patch v0.2.3 - https://github.com/timhall/toml-patch - @license: MIT +var NodeType; +(function (NodeType) { + NodeType["Document"] = "Document"; + NodeType["Table"] = "Table"; + NodeType["TableKey"] = "TableKey"; + NodeType["TableArray"] = "TableArray"; + NodeType["TableArrayKey"] = "TableArrayKey"; + NodeType["KeyValue"] = "KeyValue"; + NodeType["Key"] = "Key"; + NodeType["String"] = "String"; + NodeType["Integer"] = "Integer"; + NodeType["Float"] = "Float"; + NodeType["Boolean"] = "Boolean"; + NodeType["DateTime"] = "DateTime"; + NodeType["InlineArray"] = "InlineArray"; + NodeType["InlineItem"] = "InlineItem"; + NodeType["InlineTable"] = "InlineTable"; + NodeType["Comment"] = "Comment"; +})(NodeType || (NodeType = {})); +function isDocument(node) { + return node.type === NodeType.Document; +} +function isTable(node) { + return node.type === NodeType.Table; +} +function isTableKey(node) { + return node.type === NodeType.TableKey; +} +function isTableArray(node) { + return node.type === NodeType.TableArray; +} +function isTableArrayKey(node) { + return node.type === NodeType.TableArrayKey; +} +function isKeyValue(node) { + return node.type === NodeType.KeyValue; +} +function isInlineArray(node) { + return node.type === NodeType.InlineArray; +} +function isInlineItem(node) { + return node.type === NodeType.InlineItem; +} +function isInlineTable(node) { + return node.type === NodeType.InlineTable; +} +function isComment(node) { + return node.type === NodeType.Comment; +} +function hasItems(node) { + return (isDocument(node) || + isTable(node) || + isTableArray(node) || + isInlineTable(node) || + isInlineArray(node)); +} +function hasItem(node) { + return isTableKey(node) || isTableArrayKey(node) || isInlineItem(node); +} +function isBlock(node) { + return isKeyValue(node) || isTable(node) || isTableArray(node) || isComment(node); +} + +function iterator(value) { + return value[Symbol.iterator](); +} +class Cursor { + constructor(iterator) { + this.iterator = iterator; + this.index = -1; + this.value = undefined; + this.done = false; + this.peeked = null; + } + next() { + if (this.done) + return done(); + const result = this.peeked || this.iterator.next(); + this.index += 1; + this.value = result.value; + this.done = result.done; + this.peeked = null; + return result; + } + peek() { + if (this.done) + return done(); + if (this.peeked) + return this.peeked; + this.peeked = this.iterator.next(); + return this.peeked; + } + [Symbol.iterator]() { + return this; + } +} +function done() { + return { value: undefined, done: true }; +} + +function getSpan(location) { + return { + lines: location.end.line - location.start.line + 1, + columns: location.end.column - location.start.column + }; +} +function createLocate(input) { + const lines = findLines(input); + return (start, end) => { + return { + start: findPosition(lines, start), + end: findPosition(lines, end) + }; + }; +} +function findPosition(input, index) { + // abc\ndef\ng + // 0123 4567 8 + // 012 + // 0 + // + // lines = [3, 7, 9] + // + // c = 2: 0 -> 1, 2 - (undefined + 1 || 0) = 2 + // 3: 0 -> 1, 3 - (undefined + 1 || 0) = 3 + // e = 5: 1 -> 2, 5 - (3 + 1 || 0) = 1 + // g = 8: 2 -> 3, 8 - (7 + 1 || 0) = 0 + const lines = Array.isArray(input) ? 
input : findLines(input); + const line = lines.findIndex(line_index => line_index >= index) + 1; + const column = index - (lines[line - 2] + 1 || 0); + return { line, column }; +} +function getLine(input, position) { + const lines = findLines(input); + const start = lines[position.line - 2] || 0; + const end = lines[position.line - 1] || input.length; + return input.substr(start, end - start); +} +function findLines(input) { + // exec is stateful, so create new regexp each time + const BY_NEW_LINE = /[\r\n|\n]/g; + const indexes = []; + let match; + while ((match = BY_NEW_LINE.exec(input)) != null) { + indexes.push(match.index); + } + indexes.push(input.length + 1); + return indexes; +} +function clonePosition(position) { + return { line: position.line, column: position.column }; +} +function cloneLocation(location) { + return { start: clonePosition(location.start), end: clonePosition(location.end) }; +} +function zero() { + return { line: 1, column: 0 }; +} + +class ParseError extends Error { + constructor(input, position, message) { + let error_message = `Error parsing TOML (${position.line}, ${position.column + 1}):\n`; + if (input) { + const line = getLine(input, position); + const pointer = `${whitespace(position.column)}^`; + if (line) + error_message += `${line}\n${pointer}\n`; + } + error_message += message; + super(error_message); + this.line = position.line; + this.column = position.column; + } +} +function whitespace(count, character = ' ') { + return character.repeat(count); +} + +var TokenType; +(function (TokenType) { + TokenType["Bracket"] = "Bracket"; + TokenType["Curly"] = "Curly"; + TokenType["Equal"] = "Equal"; + TokenType["Comma"] = "Comma"; + TokenType["Dot"] = "Dot"; + TokenType["Comment"] = "Comment"; + TokenType["Literal"] = "Literal"; +})(TokenType || (TokenType = {})); +const IS_WHITESPACE = /\s/; +const IS_NEW_LINE = /(\r\n|\n)/; +const DOUBLE_QUOTE = `"`; +const SINGLE_QUOTE = `'`; +const SPACE = ' '; +const ESCAPE = '\\'; +const IS_VALID_LEADING_CHARACTER = /[\w,\d,\",\',\+,\-,\_]/; +function* tokenize(input) { + const cursor = new Cursor(iterator(input)); + cursor.next(); + const locate = createLocate(input); + while (!cursor.done) { + if (IS_WHITESPACE.test(cursor.value)) ; + else if (cursor.value === '[' || cursor.value === ']') { + // Handle special characters: [, ], {, }, =, comma + yield specialCharacter(cursor, locate, TokenType.Bracket); + } + else if (cursor.value === '{' || cursor.value === '}') { + yield specialCharacter(cursor, locate, TokenType.Curly); + } + else if (cursor.value === '=') { + yield specialCharacter(cursor, locate, TokenType.Equal); + } + else if (cursor.value === ',') { + yield specialCharacter(cursor, locate, TokenType.Comma); + } + else if (cursor.value === '.') { + yield specialCharacter(cursor, locate, TokenType.Dot); + } + else if (cursor.value === '#') { + // Handle comments = # -> EOL + yield comment(cursor, locate); + } + else { + const multiline_char = checkThree(input, cursor.index, SINGLE_QUOTE) || + checkThree(input, cursor.index, DOUBLE_QUOTE); + if (multiline_char) { + // Multi-line literals or strings = no escaping + yield multiline(cursor, locate, multiline_char, input); + } + else { + yield string(cursor, locate, input); + } + } + cursor.next(); + } +} +function specialCharacter(cursor, locate, type) { + return { type, raw: cursor.value, loc: locate(cursor.index, cursor.index + 1) }; +} +function comment(cursor, locate) { + const start = cursor.index; + let raw = cursor.value; + while (!cursor.peek().done && 
!IS_NEW_LINE.test(cursor.peek().value)) { + cursor.next(); + raw += cursor.value; + } + // Early exit is ok for comment, no closing conditions + return { + type: TokenType.Comment, + raw, + loc: locate(start, cursor.index + 1) + }; +} +function multiline(cursor, locate, multiline_char, input) { + const start = cursor.index; + let quotes = multiline_char + multiline_char + multiline_char; + let raw = quotes; + // Skip over quotes + cursor.next(); + cursor.next(); + cursor.next(); + while (!cursor.done && !checkThree(input, cursor.index, multiline_char)) { + raw += cursor.value; + cursor.next(); + } + if (cursor.done) { + throw new ParseError(input, findPosition(input, cursor.index), `Expected close of multiline string with ${quotes}, reached end of file`); + } + raw += quotes; + cursor.next(); + cursor.next(); + return { + type: TokenType.Literal, + raw, + loc: locate(start, cursor.index + 1) + }; +} +function string(cursor, locate, input) { + // Remaining possibilities: keys, strings, literals, integer, float, boolean + // + // Special cases: + // "..." -> quoted + // '...' -> quoted + // "...".'...' -> bare + // 0000-00-00 00:00:00 -> bare + // + // See https://github.com/toml-lang/toml#offset-date-time + // + // | For the sake of readability, you may replace the T delimiter between date and time with a space (as permitted by RFC 3339 section 5.6). + // | `odt4 = 1979-05-27 07:32:00Z` + // + // From RFC 3339: + // + // | NOTE: ISO 8601 defines date and time separated by "T". + // | Applications using this syntax may choose, for the sake of + // | readability, to specify a full-date and full-time separated by + // | (say) a space character. + // First, check for invalid characters + if (!IS_VALID_LEADING_CHARACTER.test(cursor.value)) { + throw new ParseError(input, findPosition(input, cursor.index), `Unsupported character "${cursor.value}". Expected ALPHANUMERIC, ", ', +, -, or _`); + } + const start = cursor.index; + let raw = cursor.value; + let double_quoted = cursor.value === DOUBLE_QUOTE; + let single_quoted = cursor.value === SINGLE_QUOTE; + const isFinished = (cursor) => { + if (cursor.peek().done) + return true; + const next_item = cursor.peek().value; + return (!(double_quoted || single_quoted) && + (IS_WHITESPACE.test(next_item) || + next_item === ',' || + next_item === '.' || + next_item === ']' || + next_item === '}' || + next_item === '=')); + }; + while (!cursor.done && !isFinished(cursor)) { + cursor.next(); + if (cursor.value === DOUBLE_QUOTE) + double_quoted = !double_quoted; + if (cursor.value === SINGLE_QUOTE && !double_quoted) + single_quoted = !single_quoted; + raw += cursor.value; + if (cursor.peek().done) + break; + let next_item = cursor.peek().value; + // If next character is escape and currently double-quoted, + // check for escaped quote + if (double_quoted && cursor.value === ESCAPE) { + if (next_item === DOUBLE_QUOTE) { + raw += DOUBLE_QUOTE; + cursor.next(); + } + else if (next_item === ESCAPE) { + raw += ESCAPE; + cursor.next(); + } + } + } + if (double_quoted || single_quoted) { + throw new ParseError(input, findPosition(input, start), `Expected close of string with ${double_quoted ? 
DOUBLE_QUOTE : SINGLE_QUOTE}`); + } + return { + type: TokenType.Literal, + raw, + loc: locate(start, cursor.index + 1) + }; +} +function checkThree(input, current, check) { + return (input[current] === check && + input[current + 1] === check && + input[current + 2] === check && + check); +} + +function last(values) { + return values[values.length - 1]; +} +function blank() { + return Object.create(null); +} +function isString(value) { + return typeof value === 'string'; +} +function isInteger(value) { + return typeof value === 'number' && value % 1 === 0; +} +function isFloat(value) { + return typeof value === 'number' && !isInteger(value); +} +function isBoolean(value) { + return typeof value === 'boolean'; +} +function isDate(value) { + return Object.prototype.toString.call(value) === '[object Date]'; +} +function isObject(value) { + return value && typeof value === 'object' && !isDate(value) && !Array.isArray(value); +} +function isIterable(value) { + return value != null && typeof value[Symbol.iterator] === 'function'; +} +function has(object, key) { + return Object.prototype.hasOwnProperty.call(object, key); +} +function arraysEqual(a, b) { + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; i++) { + if (a[i] !== b[i]) + return false; + } + return true; +} +function datesEqual(a, b) { + return isDate(a) && isDate(b) && a.toISOString() === b.toISOString(); +} +function pipe(value, ...fns) { + return fns.reduce((value, fn) => fn(value), value); +} +function stableStringify(object) { + if (isObject(object)) { + const key_values = Object.keys(object) + .sort() + .map(key => `${JSON.stringify(key)}:${stableStringify(object[key])}`); + return `{${key_values.join(',')}}`; + } + else if (Array.isArray(object)) { + return `[${object.map(stableStringify).join(',')}]`; + } + else { + return JSON.stringify(object); + } +} +function merge(target, values) { + // __mutating__: merge values into target + // Reference: https://dev.to/uilicious/javascript-array-push-is-945x-faster-than-array-concat-1oki + const original_length = target.length; + const added_length = values.length; + target.length = original_length + added_length; + for (let i = 0; i < added_length; i++) { + target[original_length + i] = values[i]; + } +} + +const TRIPLE_DOUBLE_QUOTE = `"""`; +const TRIPLE_SINGLE_QUOTE = `'''`; +const LF = '\\n'; +const CRLF = '\\r\\n'; +const IS_CRLF = /\r\n/g; +const IS_LF = /\n/g; +const IS_LEADING_NEW_LINE = /^(\r\n|\n)/; +const IS_LINE_ENDING_BACKSLASH = /\\\s*[\n\r\n]\s*/g; +function parseString(raw) { + if (raw.startsWith(TRIPLE_SINGLE_QUOTE)) { + return pipe(trim(raw, 3), trimLeadingWhitespace); + } + else if (raw.startsWith(SINGLE_QUOTE)) { + return trim(raw, 1); + } + else if (raw.startsWith(TRIPLE_DOUBLE_QUOTE)) { + return pipe(trim(raw, 3), trimLeadingWhitespace, lineEndingBackslash, escapeNewLines, unescape); + } + else if (raw.startsWith(DOUBLE_QUOTE)) { + return pipe(trim(raw, 1), unescape); + } + else { + return raw; + } +} +function unescape(escaped) { + // JSON.parse handles everything except \UXXXXXXXX + // replace those instances with code point, escape that, and then parse + const LARGE_UNICODE = /\\U[a-fA-F0-9]{8}/g; + const json_escaped = escaped.replace(LARGE_UNICODE, value => { + const code_point = parseInt(value.replace('\\U', ''), 16); + const as_string = String.fromCodePoint(code_point); + return trim(JSON.stringify(as_string), 1); + }); + return JSON.parse(`"${json_escaped}"`); +} +function trim(value, count) { + return value.substr(count, 
value.length - count * 2); +} +function trimLeadingWhitespace(value) { + return IS_LEADING_NEW_LINE.test(value) ? value.substr(1) : value; +} +function escapeNewLines(value) { + return value.replace(IS_CRLF, CRLF).replace(IS_LF, LF); +} +function lineEndingBackslash(value) { + return value.replace(IS_LINE_ENDING_BACKSLASH, ''); +} + +const TRUE = 'true'; +const FALSE = 'false'; +const HAS_E = /e/i; +const IS_DIVIDER = /\_/g; +const IS_INF = /inf/; +const IS_NAN = /nan/; +const IS_HEX = /^0x/; +const IS_OCTAL = /^0o/; +const IS_BINARY = /^0b/; +const IS_FULL_DATE = /(\d{4})-(\d{2})-(\d{2})/; +const IS_FULL_TIME = /(\d{2}):(\d{2}):(\d{2})/; +function* parseTOML(input) { + const tokens = tokenize(input); + const cursor = new Cursor(tokens); + while (!cursor.next().done) { + yield* walkBlock(cursor, input); + } +} +function* walkBlock(cursor, input) { + if (cursor.value.type === TokenType.Comment) { + yield comment$1(cursor); + } + else if (cursor.value.type === TokenType.Bracket) { + yield table(cursor, input); + } + else if (cursor.value.type === TokenType.Literal) { + yield* keyValue(cursor, input); + } + else { + throw new ParseError(input, cursor.value.loc.start, `Unexpected token "${cursor.value.type}". Expected Comment, Bracket, or String`); + } +} +function* walkValue(cursor, input) { + if (cursor.value.type === TokenType.Literal) { + if (cursor.value.raw[0] === DOUBLE_QUOTE || cursor.value.raw[0] === SINGLE_QUOTE) { + yield string$1(cursor); + } + else if (cursor.value.raw === TRUE || cursor.value.raw === FALSE) { + yield boolean(cursor); + } + else if (IS_FULL_DATE.test(cursor.value.raw) || IS_FULL_TIME.test(cursor.value.raw)) { + yield datetime(cursor, input); + } + else if ((!cursor.peek().done && cursor.peek().value.type === TokenType.Dot) || + IS_INF.test(cursor.value.raw) || + IS_NAN.test(cursor.value.raw) || + (HAS_E.test(cursor.value.raw) && !IS_HEX.test(cursor.value.raw))) { + yield float(cursor, input); + } + else { + yield integer(cursor); + } + } + else if (cursor.value.type === TokenType.Curly) { + yield inlineTable(cursor, input); + } + else if (cursor.value.type === TokenType.Bracket) { + const [inline_array, comments] = inlineArray(cursor, input); + yield inline_array; + yield* comments; + } + else { + throw new ParseError(input, cursor.value.loc.start, `Unrecognized token type "${cursor.value.type}". Expected String, Curly, or Bracket`); + } +} +function comment$1(cursor) { + // # line comment + // ^------------^ Comment + return { + type: NodeType.Comment, + loc: cursor.value.loc, + raw: cursor.value.raw + }; +} +function table(cursor, input) { + // Table or TableArray + // + // [ key ] + // ^-----^ TableKey + // ^-^ Key + // + // [[ key ]] + // ^ ------^ TableArrayKey + // ^-^ Key + // + // a = "b" < Items + // # c | + // d = "f" < + // + // ... + const type = !cursor.peek().done && cursor.peek().value.type === TokenType.Bracket + ? NodeType.TableArray + : NodeType.Table; + const is_table = type === NodeType.Table; + if (is_table && cursor.value.raw !== '[') { + throw new ParseError(input, cursor.value.loc.start, `Expected table opening "[", found ${cursor.value.raw}`); + } + if (!is_table && (cursor.value.raw !== '[' || cursor.peek().value.raw !== '[')) { + throw new ParseError(input, cursor.value.loc.start, `Expected array of tables opening "[[", found ${cursor.value.raw + cursor.peek().value.raw}`); + } + // Set start location from opening tag + const key = is_table + ? 
{ + type: NodeType.TableKey, + loc: cursor.value.loc + } + : { + type: NodeType.TableArrayKey, + loc: cursor.value.loc + }; + // Skip to cursor.value for key value + cursor.next(); + if (type === NodeType.TableArray) + cursor.next(); + if (cursor.done) { + throw new ParseError(input, key.loc.start, `Expected table key, reached end of file`); + } + key.item = { + type: NodeType.Key, + loc: cloneLocation(cursor.value.loc), + raw: cursor.value.raw, + value: [parseString(cursor.value.raw)] + }; + while (!cursor.peek().done && cursor.peek().value.type === TokenType.Dot) { + cursor.next(); + const dot = cursor.value; + cursor.next(); + const before = ' '.repeat(dot.loc.start.column - key.item.loc.end.column); + const after = ' '.repeat(cursor.value.loc.start.column - dot.loc.end.column); + key.item.loc.end = cursor.value.loc.end; + key.item.raw += `${before}.${after}${cursor.value.raw}`; + key.item.value.push(parseString(cursor.value.raw)); + } + cursor.next(); + if (is_table && (cursor.done || cursor.value.raw !== ']')) { + throw new ParseError(input, cursor.done ? key.item.loc.end : cursor.value.loc.start, `Expected table closing "]", found ${cursor.done ? 'end of file' : cursor.value.raw}`); + } + if (!is_table && + (cursor.done || + cursor.peek().done || + cursor.value.raw !== ']' || + cursor.peek().value.raw !== ']')) { + throw new ParseError(input, cursor.done || cursor.peek().done ? key.item.loc.end : cursor.value.loc.start, `Expected array of tables closing "]]", found ${cursor.done || cursor.peek().done + ? 'end of file' + : cursor.value.raw + cursor.peek().value.raw}`); + } + // Set end location from closing tag + if (!is_table) + cursor.next(); + key.loc.end = cursor.value.loc.end; + // Add child items + let items = []; + while (!cursor.peek().done && cursor.peek().value.type !== TokenType.Bracket) { + cursor.next(); + merge(items, [...walkBlock(cursor, input)]); + } + return { + type: is_table ? NodeType.Table : NodeType.TableArray, + loc: { + start: clonePosition(key.loc.start), + end: items.length + ? clonePosition(items[items.length - 1].loc.end) + : clonePosition(key.loc.end) + }, + key: key, + items + }; +} +function keyValue(cursor, input) { + // 3. KeyValue + // + // key = value + // ^-^ key + // ^ equals + // ^---^ value + const key = { + type: NodeType.Key, + loc: cloneLocation(cursor.value.loc), + raw: cursor.value.raw, + value: [parseString(cursor.value.raw)] + }; + while (!cursor.peek().done && cursor.peek().value.type === TokenType.Dot) { + cursor.next(); + cursor.next(); + key.loc.end = cursor.value.loc.end; + key.raw += `.${cursor.value.raw}`; + key.value.push(parseString(cursor.value.raw)); + } + cursor.next(); + if (cursor.done || cursor.value.type !== TokenType.Equal) { + throw new ParseError(input, cursor.done ? key.loc.end : cursor.value.loc.start, `Expected "=" for key-value, found ${cursor.done ? 
'end of file' : cursor.value.raw}`); + } + const equals = cursor.value.loc.start.column; + cursor.next(); + if (cursor.done) { + throw new ParseError(input, key.loc.start, `Expected value for key-value, reached end of file`); + } + const [value, ...comments] = walkValue(cursor, input); + return [ + { + type: NodeType.KeyValue, + key, + value: value, + loc: { + start: clonePosition(key.loc.start), + end: clonePosition(value.loc.end) + }, + equals + }, + ...comments + ]; +} +function string$1(cursor) { + return { + type: NodeType.String, + loc: cursor.value.loc, + raw: cursor.value.raw, + value: parseString(cursor.value.raw) + }; +} +function boolean(cursor) { + return { + type: NodeType.Boolean, + loc: cursor.value.loc, + value: cursor.value.raw === TRUE + }; +} +function datetime(cursor, input) { + // Possible values: + // + // Offset Date-Time + // | odt1 = 1979-05-27T07:32:00Z + // | odt2 = 1979-05-27T00:32:00-07:00 + // | odt3 = 1979-05-27T00:32:00.999999-07:00 + // | odt4 = 1979-05-27 07:32:00Z + // + // Local Date-Time + // | ldt1 = 1979-05-27T07:32:00 + // | ldt2 = 1979-05-27T00:32:00.999999 + // + // Local Date + // | ld1 = 1979-05-27 + // + // Local Time + // | lt1 = 07:32:00 + // | lt2 = 00:32:00.999999 + let loc = cursor.value.loc; + let raw = cursor.value.raw; + let value; + // If next token is string, + // check if raw is full date and following is full time + if (!cursor.peek().done && + cursor.peek().value.type === TokenType.Literal && + IS_FULL_DATE.test(raw) && + IS_FULL_TIME.test(cursor.peek().value.raw)) { + const start = loc.start; + cursor.next(); + loc = { start, end: cursor.value.loc.end }; + raw += ` ${cursor.value.raw}`; + } + if (!cursor.peek().done && cursor.peek().value.type === TokenType.Dot) { + const start = loc.start; + cursor.next(); + if (cursor.peek().done || cursor.peek().value.type !== TokenType.Literal) { + throw new ParseError(input, cursor.value.loc.end, `Expected fractional value for DateTime`); + } + cursor.next(); + loc = { start, end: cursor.value.loc.end }; + raw += `.${cursor.value.raw}`; + } + if (!IS_FULL_DATE.test(raw)) { + // For local time, use local ISO date + const [local_date] = new Date().toISOString().split('T'); + value = new Date(`${local_date}T${raw}`); + } + else { + value = new Date(raw.replace(' ', 'T')); + } + return { + type: NodeType.DateTime, + loc, + raw, + value + }; +} +function float(cursor, input) { + let loc = cursor.value.loc; + let raw = cursor.value.raw; + let value; + if (IS_INF.test(raw)) { + value = raw === '-inf' ? -Infinity : Infinity; + } + else if (IS_NAN.test(raw)) { + value = raw === '-nan' ? -NaN : NaN; + } + else if (!cursor.peek().done && cursor.peek().value.type === TokenType.Dot) { + const start = loc.start; + // From spec: + // | A fractional part is a decimal point followed by one or more digits. + // + // -> Don't have to handle "4." (i.e. 
nothing behind decimal place) + cursor.next(); + if (cursor.peek().done || cursor.peek().value.type !== TokenType.Literal) { + throw new ParseError(input, cursor.value.loc.end, `Expected fraction value for Float`); + } + cursor.next(); + raw += `.${cursor.value.raw}`; + loc = { start, end: cursor.value.loc.end }; + value = Number(raw.replace(IS_DIVIDER, '')); + } + else { + value = Number(raw.replace(IS_DIVIDER, '')); + } + return { type: NodeType.Float, loc, raw, value }; +} +function integer(cursor) { + // > Integer values -0 and +0 are valid and identical to an unprefixed zero + if (cursor.value.raw === '-0' || cursor.value.raw === '+0') { + return { + type: NodeType.Integer, + loc: cursor.value.loc, + raw: cursor.value.raw, + value: 0 + }; + } + let radix = 10; + if (IS_HEX.test(cursor.value.raw)) { + radix = 16; + } + else if (IS_OCTAL.test(cursor.value.raw)) { + radix = 8; + } + else if (IS_BINARY.test(cursor.value.raw)) { + radix = 2; + } + const value = parseInt(cursor + .value.raw.replace(IS_DIVIDER, '') + .replace(IS_OCTAL, '') + .replace(IS_BINARY, ''), radix); + return { + type: NodeType.Integer, + loc: cursor.value.loc, + raw: cursor.value.raw, + value + }; +} +function inlineTable(cursor, input) { + if (cursor.value.raw !== '{') { + throw new ParseError(input, cursor.value.loc.start, `Expected "{" for inline table, found ${cursor.value.raw}`); + } + // 6. InlineTable + const value = { + type: NodeType.InlineTable, + loc: cloneLocation(cursor.value.loc), + items: [] + }; + cursor.next(); + while (!cursor.done && + !(cursor.value.type === TokenType.Curly && cursor.value.raw === '}')) { + if (cursor.value.type === TokenType.Comma) { + const previous = value.items[value.items.length - 1]; + if (!previous) { + throw new ParseError(input, cursor.value.loc.start, 'Found "," without previous value in inline table'); + } + previous.comma = true; + previous.loc.end = cursor.value.loc.start; + cursor.next(); + continue; + } + const [item] = walkBlock(cursor, input); + if (item.type !== NodeType.KeyValue) { + throw new ParseError(input, cursor.value.loc.start, `Only key-values are supported in inline tables, found ${item.type}`); + } + const inline_item = { + type: NodeType.InlineItem, + loc: cloneLocation(item.loc), + item, + comma: false + }; + value.items.push(inline_item); + cursor.next(); + } + if (cursor.done || + cursor.value.type !== TokenType.Curly || + cursor.value.raw !== '}') { + throw new ParseError(input, cursor.done ? value.loc.start : cursor.value.loc.start, `Expected "}", found ${cursor.done ? 'end of file' : cursor.value.raw}`); + } + value.loc.end = cursor.value.loc.end; + return value; +} +function inlineArray(cursor, input) { + // 7. 
InlineArray + if (cursor.value.raw !== '[') { + throw new ParseError(input, cursor.value.loc.start, `Expected "[" for inline array, found ${cursor.value.raw}`); + } + const value = { + type: NodeType.InlineArray, + loc: cloneLocation(cursor.value.loc), + items: [] + }; + let comments = []; + cursor.next(); + while (!cursor.done && + !(cursor.value.type === TokenType.Bracket && cursor.value.raw === ']')) { + if (cursor.value.type === TokenType.Comma) { + const previous = value.items[value.items.length - 1]; + if (!previous) { + throw new ParseError(input, cursor.value.loc.start, 'Found "," without previous value for inline array'); + } + previous.comma = true; + previous.loc.end = cursor.value.loc.start; + } + else if (cursor.value.type === TokenType.Comment) { + comments.push(comment$1(cursor)); + } + else { + const [item, ...additional_comments] = walkValue(cursor, input); + const inline_item = { + type: NodeType.InlineItem, + loc: cloneLocation(item.loc), + item, + comma: false + }; + value.items.push(inline_item); + merge(comments, additional_comments); + } + cursor.next(); + } + if (cursor.done || + cursor.value.type !== TokenType.Bracket || + cursor.value.raw !== ']') { + throw new ParseError(input, cursor.done ? value.loc.start : cursor.value.loc.start, `Expected "]", found ${cursor.done ? 'end of file' : cursor.value.raw}`); + } + value.loc.end = cursor.value.loc.end; + return [value, comments]; +} + +function traverse(ast, visitor) { + if (isIterable(ast)) { + traverseArray(ast, null); + } + else { + traverseNode(ast, null); + } + function traverseArray(array, parent) { + for (const node of array) { + traverseNode(node, parent); + } + } + function traverseNode(node, parent) { + const visit = visitor[node.type]; + if (visit && typeof visit === 'function') { + visit(node, parent); + } + if (visit && visit.enter) { + visit.enter(node, parent); + } + switch (node.type) { + case NodeType.Document: + traverseArray(node.items, node); + break; + case NodeType.Table: + traverseNode(node.key, node); + traverseArray(node.items, node); + break; + case NodeType.TableKey: + traverseNode(node.item, node); + break; + case NodeType.TableArray: + traverseNode(node.key, node); + traverseArray(node.items, node); + break; + case NodeType.TableArrayKey: + traverseNode(node.item, node); + break; + case NodeType.KeyValue: + traverseNode(node.key, node); + traverseNode(node.value, node); + break; + case NodeType.InlineArray: + traverseArray(node.items, node); + break; + case NodeType.InlineItem: + traverseNode(node.item, node); + break; + case NodeType.InlineTable: + traverseArray(node.items, node); + break; + case NodeType.Key: + case NodeType.String: + case NodeType.Integer: + case NodeType.Float: + case NodeType.Boolean: + case NodeType.DateTime: + case NodeType.Comment: + break; + default: + throw new Error(`Unrecognized node type "${node.type}"`); + } + if (visit && visit.exit) { + visit.exit(node, parent); + } + } +} + +const enter_offsets = new WeakMap(); +const getEnter = (root) => { + if (!enter_offsets.has(root)) { + enter_offsets.set(root, new WeakMap()); + } + return enter_offsets.get(root); +}; +const exit_offsets = new WeakMap(); +const getExit = (root) => { + if (!exit_offsets.has(root)) { + exit_offsets.set(root, new WeakMap()); + } + return exit_offsets.get(root); +}; +function replace(root, parent, existing, replacement) { + // First, replace existing node + // (by index for items, item, or key/value) + if (hasItems(parent)) { + const index = parent.items.indexOf(existing); + if (index < 
0) + throw new Error(`Could not find existing item in parent node for replace`); + parent.items.splice(index, 1, replacement); + } + else if (hasItem(parent)) { + parent.item = replacement; + } + else if (isKeyValue(parent)) { + if (parent.key === existing) { + parent.key = replacement; + } + else { + parent.value = replacement; + } + } + else { + throw new Error(`Unsupported parent type "${parent.type}" for replace`); + } + // Shift the replacement node into the same start position as existing + const shift = { + lines: existing.loc.start.line - replacement.loc.start.line, + columns: existing.loc.start.column - replacement.loc.start.column + }; + shiftNode(replacement, shift); + // Apply offsets after replacement node + const existing_span = getSpan(existing.loc); + const replacement_span = getSpan(replacement.loc); + const offset = { + lines: replacement_span.lines - existing_span.lines, + columns: replacement_span.columns - existing_span.columns + }; + addOffset(offset, getExit(root), replacement, existing); +} +function insert(root, parent, child, index) { + if (!hasItems(parent)) { + throw new Error(`Unsupported parent type "${parent.type}" for insert`); + } + index = index != null ? index : parent.items.length; + let shift; + let offset; + if (isInlineArray(parent) || isInlineTable(parent)) { + ({ shift, offset } = insertInline(parent, child, index)); + } + else { + ({ shift, offset } = insertOnNewLine(parent, child, index)); + } + shiftNode(child, shift); + // The child element is placed relative to the previous element, + // if the previous element has an offset, need to position relative to that + // -> Move previous offset to child's offset + const previous = parent.items[index - 1]; + const previous_offset = previous && getExit(root).get(previous); + if (previous_offset) { + offset.lines += previous_offset.lines; + offset.columns += previous_offset.columns; + // Account for comma overlay + // + // a = [b, e] + // a = [b, c, e] + // ^---^ + // a = [b, c, d, e] + // ^---^ + if (isInlineItem(child) && previous && parent.items[index + 1]) { + offset.columns -= 2; + } + getExit(root).delete(previous); + } + const offsets = getExit(root); + offsets.set(child, offset); +} +function insertOnNewLine(parent, child, index) { + if (!isBlock(child)) { + throw new Error(`Incompatible child type "${child.type}"`); + } + const previous = parent.items[index - 1]; + const use_first_line = isDocument(parent) && !parent.items.length; + parent.items.splice(index, 0, child); + // Set start location from previous item or start of array + // (previous is undefined for empty array or inserting at first item) + const start = previous + ? { + line: previous.loc.end.line, + column: !isComment(previous) ? 
previous.loc.start.column : parent.loc.start.column + } + : clonePosition(parent.loc.start); + const is_block = isTable(child) || isTableArray(child); + let leading_lines = 0; + if (use_first_line) ; + else if (is_block) { + leading_lines = 2; + } + else { + leading_lines = 1; + } + start.line += leading_lines; + const shift = { + lines: start.line - child.loc.start.line, + columns: start.column - child.loc.start.column + }; + // Apply offsets after child node + const child_span = getSpan(child.loc); + const offset = { + lines: child_span.lines + (leading_lines - 1), + columns: child_span.columns + }; + return { shift, offset }; +} +function insertInline(parent, child, index) { + if (!isInlineItem(child)) { + throw new Error(`Incompatible child type "${child.type}"`); + } + // Store preceding node and insert + const previous = index != null ? parent.items[index - 1] : last(parent.items); + const is_last = index == null || index === parent.items.length; + parent.items.splice(index, 0, child); + // Add commas as-needed + const leading_comma = !!previous; + const trailing_comma = !is_last; + const last_comma = is_last && child.comma === true; + if (leading_comma) { + previous.comma = true; + } + if (trailing_comma) { + child.comma = true; + } + // Use a new line for documents, children of Table/TableArray, + // and if an inline table is using new lines + const use_new_line = isInlineArray(parent) && perLine(parent); + // Set start location from previous item or start of array + // (previous is undefined for empty array or inserting at first item) + const start = previous + ? { + line: previous.loc.end.line, + column: use_new_line + ? !isComment(previous) + ? previous.loc.start.column + : parent.loc.start.column + : previous.loc.end.column + } + : clonePosition(parent.loc.start); + let leading_lines = 0; + if (use_new_line) { + leading_lines = 1; + } + else { + const skip_comma = 2; + const skip_bracket = 1; + start.column += leading_comma ? skip_comma : skip_bracket; + } + start.line += leading_lines; + const shift = { + lines: start.line - child.loc.start.line, + columns: start.column - child.loc.start.column + }; + // Apply offsets after child node + const child_span = getSpan(child.loc); + const offset = { + lines: child_span.lines + (leading_lines - 1), + columns: child_span.columns + (leading_comma || trailing_comma ? 2 : 0) + (last_comma ? 
1 : 0) + }; + return { shift, offset }; +} +function remove(root, parent, node) { + // Remove an element from the parent's items + // (supports Document, Table, TableArray, InlineTable, and InlineArray + // + // X + // [ 1, 2, 3 ] + // ^-^ + // -> Remove element 2 and apply 0,-3 offset to 1 + // + // [table] + // a = 1 + // b = 2 # X + // c = 3 + // -> Remove element 2 and apply -1,0 offset to 1 + if (!hasItems(parent)) { + throw new Error(`Unsupported parent type "${parent.type}" for remove`); + } + let index = parent.items.indexOf(node); + if (index < 0) { + // Try again, looking at child items for nodes like InlineArrayItem + index = parent.items.findIndex(item => hasItem(item) && item.item === node); + if (index < 0) { + throw new Error('Could not find node in parent for removal'); + } + node = parent.items[index]; + } + const previous = parent.items[index - 1]; + let next = parent.items[index + 1]; + // Remove node + parent.items.splice(index, 1); + let removed_span = getSpan(node.loc); + // Remove an associated comment that appears on the same line + // + // [table] + // a = 1 + // b = 2 # remove this too + // c = 3 + // + // TODO InlineTable - this only applies to comments in Table/TableArray + if (next && isComment(next) && next.loc.start.line === node.loc.end.line) { + // Add comment to removed + removed_span = getSpan({ start: node.loc.start, end: next.loc.end }); + // Shift to next item + // (use same index since node has already been removed) + next = parent.items[index + 1]; + // Remove comment + parent.items.splice(index, 1); + } + // For inline tables and arrays, check whether the line should be kept + const is_inline = previous && isInlineItem(previous); + const previous_on_same_line = previous && previous.loc.end.line === node.loc.start.line; + const next_on_sameLine = next && next.loc.start.line === node.loc.end.line; + const keep_line = is_inline && (previous_on_same_line || next_on_sameLine); + const offset = { + lines: -(removed_span.lines - (keep_line ? 1 : 0)), + columns: -removed_span.columns + }; + // Offset for comma and remove comma from previous (if-needed) + if (is_inline && previous_on_same_line) { + offset.columns -= 2; + } + if (is_inline && previous && !next) { + previous.comma = false; + } + // Apply offsets after preceding node or before children of parent node + const target = previous || parent; + const target_offsets = previous ? 
getExit(root) : getEnter(root); + const node_offsets = getExit(root); + const previous_offset = target_offsets.get(target); + if (previous_offset) { + offset.lines += previous_offset.lines; + offset.columns += previous_offset.columns; + } + const removed_offset = node_offsets.get(node); + if (removed_offset) { + offset.lines += removed_offset.lines; + offset.columns += removed_offset.columns; + } + target_offsets.set(target, offset); +} +function applyBracketSpacing(root, node, bracket_spacing = true) { + // Can only add bracket spacing currently + if (!bracket_spacing) + return; + if (!node.items.length) + return; + // Apply enter to node so that items are affected + addOffset({ lines: 0, columns: 1 }, getEnter(root), node); + // Apply exit to last node in items + const last_item = last(node.items); + addOffset({ lines: 0, columns: 1 }, getExit(root), last_item); +} +function applyTrailingComma(root, node, trailing_commas = false) { + // Can only add trailing comma currently + if (!trailing_commas) + return; + if (!node.items.length) + return; + const last_item = last(node.items); + last_item.comma = true; + addOffset({ lines: 0, columns: 1 }, getExit(root), last_item); +} +function applyWrites(root) { + const enter = getEnter(root); + const exit = getExit(root); + const offset = { + lines: 0, + columns: {} + }; + function shiftStart(node) { + node.loc.start.line += offset.lines; + node.loc.start.column += offset.columns[node.loc.start.line] || 0; + const entering = enter.get(node); + if (entering) { + offset.lines += entering.lines; + offset.columns[node.loc.start.line] = + (offset.columns[node.loc.start.line] || 0) + entering.columns; + } + } + function shiftEnd(node) { + node.loc.end.line += offset.lines; + node.loc.end.column += offset.columns[node.loc.end.line] || 0; + const exiting = exit.get(node); + if (exiting) { + offset.lines += exiting.lines; + offset.columns[node.loc.end.line] = + (offset.columns[node.loc.end.line] || 0) + exiting.columns; + } + } + const shiftLocation = { + enter: shiftStart, + exit: shiftEnd + }; + traverse(root, { + [NodeType.Document]: shiftLocation, + [NodeType.Table]: shiftLocation, + [NodeType.TableArray]: shiftLocation, + [NodeType.InlineTable]: shiftLocation, + [NodeType.InlineArray]: shiftLocation, + [NodeType.InlineItem]: shiftLocation, + [NodeType.TableKey]: shiftLocation, + [NodeType.TableArrayKey]: shiftLocation, + [NodeType.KeyValue]: { + enter(node) { + const start_line = node.loc.start.line + offset.lines; + const key_offset = exit.get(node.key); + node.equals += (offset.columns[start_line] || 0) + (key_offset ? 
key_offset.columns : 0); + shiftStart(node); + }, + exit: shiftEnd + }, + [NodeType.Key]: shiftLocation, + [NodeType.String]: shiftLocation, + [NodeType.Integer]: shiftLocation, + [NodeType.Float]: shiftLocation, + [NodeType.Boolean]: shiftLocation, + [NodeType.DateTime]: shiftLocation, + [NodeType.Comment]: shiftLocation + }); + enter_offsets.delete(root); + exit_offsets.delete(root); +} +function shiftNode(node, span, options = {}) { + const { first_line_only = false } = options; + const start_line = node.loc.start.line; + const { lines, columns } = span; + const move = (node) => { + if (!first_line_only || node.loc.start.line === start_line) { + node.loc.start.column += columns; + node.loc.end.column += columns; + } + node.loc.start.line += lines; + node.loc.end.line += lines; + }; + traverse(node, { + [NodeType.Table]: move, + [NodeType.TableKey]: move, + [NodeType.TableArray]: move, + [NodeType.TableArrayKey]: move, + [NodeType.KeyValue](node) { + move(node); + node.equals += columns; + }, + [NodeType.Key]: move, + [NodeType.String]: move, + [NodeType.Integer]: move, + [NodeType.Float]: move, + [NodeType.Boolean]: move, + [NodeType.DateTime]: move, + [NodeType.InlineArray]: move, + [NodeType.InlineItem]: move, + [NodeType.InlineTable]: move, + [NodeType.Comment]: move + }); + return node; +} +function perLine(array) { + if (!array.items.length) + return false; + const span = getSpan(array.loc); + return span.lines > array.items.length; +} +function addOffset(offset, offsets, node, from) { + const previous_offset = offsets.get(from || node); + if (previous_offset) { + offset.lines += previous_offset.lines; + offset.columns += previous_offset.columns; + } + offsets.set(node, offset); +} + +function generateDocument() { + return { + type: NodeType.Document, + loc: { start: zero(), end: zero() }, + items: [] + }; +} +function generateTable(key) { + const table_key = generateTableKey(key); + return { + type: NodeType.Table, + loc: cloneLocation(table_key.loc), + key: table_key, + items: [] + }; +} +function generateTableKey(key) { + const raw = keyValueToRaw(key); + return { + type: NodeType.TableKey, + loc: { + start: zero(), + end: { line: 1, column: raw.length + 2 } + }, + item: { + type: NodeType.Key, + loc: { + start: { line: 1, column: 1 }, + end: { line: 1, column: raw.length + 1 } + }, + value: key, + raw + } + }; +} +function generateTableArray(key) { + const table_array_key = generateTableArrayKey(key); + return { + type: NodeType.TableArray, + loc: cloneLocation(table_array_key.loc), + key: table_array_key, + items: [] + }; +} +function generateTableArrayKey(key) { + const raw = keyValueToRaw(key); + return { + type: NodeType.TableArrayKey, + loc: { + start: zero(), + end: { line: 1, column: raw.length + 4 } + }, + item: { + type: NodeType.Key, + loc: { + start: { line: 1, column: 2 }, + end: { line: 1, column: raw.length + 2 } + }, + value: key, + raw + } + }; +} +function generateKeyValue(key, value) { + const key_node = generateKey(key); + const { column } = key_node.loc.end; + const equals = column + 1; + shiftNode(value, { lines: 0, columns: column + 3 - value.loc.start.column }, { first_line_only: true }); + return { + type: NodeType.KeyValue, + loc: { + start: clonePosition(key_node.loc.start), + end: clonePosition(value.loc.end) + }, + key: key_node, + equals, + value + }; +} +const IS_BARE_KEY = /[\w,\d,\_,\-]+/; +function keyValueToRaw(value) { + return value.map(part => (IS_BARE_KEY.test(part) ? 
part : JSON.stringify(part))).join('.'); +} +function generateKey(value) { + const raw = keyValueToRaw(value); + return { + type: NodeType.Key, + loc: { start: zero(), end: { line: 1, column: raw.length } }, + raw, + value + }; +} +function generateString(value) { + const raw = JSON.stringify(value); + return { + type: NodeType.String, + loc: { start: zero(), end: { line: 1, column: raw.length } }, + raw, + value + }; +} +function generateInteger(value) { + const raw = value.toString(); + return { + type: NodeType.Integer, + loc: { start: zero(), end: { line: 1, column: raw.length } }, + raw, + value + }; +} +function generateFloat(value) { + const raw = value.toString(); + return { + type: NodeType.Float, + loc: { start: zero(), end: { line: 1, column: raw.length } }, + raw, + value + }; +} +function generateBoolean(value) { + return { + type: NodeType.Boolean, + loc: { start: zero(), end: { line: 1, column: value ? 4 : 5 } }, + value + }; +} +function generateDateTime(value) { + const raw = value.toISOString(); + return { + type: NodeType.DateTime, + loc: { start: zero(), end: { line: 1, column: raw.length } }, + raw, + value + }; +} +function generateInlineArray() { + return { + type: NodeType.InlineArray, + loc: { start: zero(), end: { line: 1, column: 2 } }, + items: [] + }; +} +function generateInlineItem(item) { + return { + type: NodeType.InlineItem, + loc: cloneLocation(item.loc), + item, + comma: false + }; +} +function generateInlineTable() { + return { + type: NodeType.InlineTable, + loc: { start: zero(), end: { line: 1, column: 2 } }, + items: [] + }; +} + +function formatTopLevel(document) { + const move_to_top_level = document.items.filter(item => { + if (!isKeyValue(item)) + return false; + const is_inline_table = isInlineTable(item.value); + const is_inline_array = isInlineArray(item.value) && + item.value.items.length && + isInlineTable(item.value.items[0].item); + return is_inline_table || is_inline_array; + }); + move_to_top_level.forEach(node => { + remove(document, document, node); + if (isInlineTable(node.value)) { + insert(document, document, formatTable(node)); + } + else { + formatTableArray(node).forEach(table_array => { + insert(document, document, table_array); + }); + } + }); + applyWrites(document); + return document; +} +function formatTable(key_value) { + const table = generateTable(key_value.key.value); + for (const item of key_value.value.items) { + insert(table, table, item.item); + } + applyWrites(table); + return table; +} +function formatTableArray(key_value) { + const root = generateDocument(); + for (const inline_array_item of key_value.value.items) { + const table_array = generateTableArray(key_value.key.value); + insert(root, root, table_array); + for (const inline_table_item of inline_array_item.item.items) { + insert(root, table_array, inline_table_item.item); + } + } + applyWrites(root); + return root.items; +} +function formatPrintWidth(document, format) { + // TODO + return document; +} +function formatEmptyLines(document) { + let shift = 0; + let previous = 0; + for (const item of document.items) { + if (previous === 0 && item.loc.start.line > 1) { + // Remove leading newlines + shift = 1 - item.loc.start.line; + } + else if (item.loc.start.line + shift > previous + 2) { + shift += previous + 2 - (item.loc.start.line + shift); + } + shiftNode(item, { + lines: shift, + columns: 0 + }); + previous = item.loc.end.line; + } + return document; +} + +const default_format = { + printWidth: 80, + trailingComma: false, + bracketSpacing: true +}; 
+function parseJS(value, format = {}) { + format = Object.assign({}, default_format, format); + value = toJSON(value); + const document = generateDocument(); + for (const item of walkObject(value, format)) { + insert(document, document, item); + } + applyWrites(document); + // Heuristics: + // 1. Top-level objects/arrays should be tables/table arrays + // 2. Convert objects/arrays to tables/table arrays based on print width + const formatted = pipe(document, formatTopLevel, document => formatPrintWidth(document), formatEmptyLines); + return formatted; +} +function* walkObject(object, format) { + for (const key of Object.keys(object)) { + yield generateKeyValue([key], walkValue$1(object[key], format)); + } +} +function walkValue$1(value, format) { + if (value == null) { + throw new Error('"null" and "undefined" values are not supported'); + } + if (isString(value)) { + return generateString(value); + } + else if (isInteger(value)) { + return generateInteger(value); + } + else if (isFloat(value)) { + return generateFloat(value); + } + else if (isBoolean(value)) { + return generateBoolean(value); + } + else if (isDate(value)) { + return generateDateTime(value); + } + else if (Array.isArray(value)) { + return walkInlineArray(value, format); + } + else { + return walkInlineTable(value, format); + } +} +function walkInlineArray(value, format) { + const inline_array = generateInlineArray(); + for (const element of value) { + const item = walkValue$1(element, format); + const inline_array_item = generateInlineItem(item); + insert(inline_array, inline_array, inline_array_item); + } + applyBracketSpacing(inline_array, inline_array, format.bracketSpacing); + applyTrailingComma(inline_array, inline_array, format.trailingComma); + applyWrites(inline_array); + return inline_array; +} +function walkInlineTable(value, format) { + value = toJSON(value); + if (!isObject(value)) + return walkValue$1(value, format); + const inline_table = generateInlineTable(); + const items = [...walkObject(value, format)]; + for (const item of items) { + const inline_table_item = generateInlineItem(item); + insert(inline_table, inline_table, inline_table_item); + } + applyBracketSpacing(inline_table, inline_table, format.bracketSpacing); + applyTrailingComma(inline_table, inline_table, format.trailingComma); + applyWrites(inline_table); + return inline_table; +} +function toJSON(value) { + return value && !isDate(value) && typeof value.toJSON === 'function' ? 
value.toJSON() : value; +} + +const BY_NEW_LINE = /(\r\n|\n)/g; +function toTOML(ast, newline = '\n') { + const lines = []; + traverse(ast, { + [NodeType.TableKey](node) { + const { start, end } = node.loc; + write(lines, { start, end: { line: start.line, column: start.column + 1 } }, '['); + write(lines, { start: { line: end.line, column: end.column - 1 }, end }, ']'); + }, + [NodeType.TableArrayKey](node) { + const { start, end } = node.loc; + write(lines, { start, end: { line: start.line, column: start.column + 2 } }, '[['); + write(lines, { start: { line: end.line, column: end.column - 2 }, end }, ']]'); + }, + [NodeType.KeyValue](node) { + const { start: { line } } = node.loc; + write(lines, { start: { line, column: node.equals }, end: { line, column: node.equals + 1 } }, '='); + }, + [NodeType.Key](node) { + write(lines, node.loc, node.raw); + }, + [NodeType.String](node) { + write(lines, node.loc, node.raw); + }, + [NodeType.Integer](node) { + write(lines, node.loc, node.raw); + }, + [NodeType.Float](node) { + write(lines, node.loc, node.raw); + }, + [NodeType.Boolean](node) { + write(lines, node.loc, node.value.toString()); + }, + [NodeType.DateTime](node) { + write(lines, node.loc, node.raw); + }, + [NodeType.InlineArray](node) { + const { start, end } = node.loc; + write(lines, { start, end: { line: start.line, column: start.column + 1 } }, '['); + write(lines, { start: { line: end.line, column: end.column - 1 }, end }, ']'); + }, + [NodeType.InlineTable](node) { + const { start, end } = node.loc; + write(lines, { start, end: { line: start.line, column: start.column + 1 } }, '{'); + write(lines, { start: { line: end.line, column: end.column - 1 }, end }, '}'); + }, + [NodeType.InlineItem](node) { + if (!node.comma) + return; + const start = node.loc.end; + write(lines, { start, end: { line: start.line, column: start.column + 1 } }, ','); + }, + [NodeType.Comment](node) { + write(lines, node.loc, node.raw); + } + }); + return lines.join(newline) + newline; +} +function write(lines, loc, raw) { + const raw_lines = raw.split(BY_NEW_LINE); + const expected_lines = loc.end.line - loc.start.line + 1; + if (raw_lines.length !== expected_lines) { + throw new Error(`Mismatch between location and raw string, expected ${expected_lines} lines for "${raw}"`); + } + for (let i = loc.start.line; i <= loc.end.line; i++) { + const line = getLine$1(lines, i); + const is_start_line = i === loc.start.line; + const is_end_line = i === loc.end.line; + const before = is_start_line + ? line.substr(0, loc.start.column).padEnd(loc.start.column, SPACE) + : ''; + const after = is_end_line ? 
line.substr(loc.end.column) : ''; + lines[i - 1] = before + raw_lines[i - loc.start.line] + after; + } +} +function getLine$1(lines, index) { + if (!lines[index - 1]) { + for (let i = 0; i < index; i++) { + if (!lines[i]) + lines[i] = ''; + } + } + return lines[index - 1]; +} + +function toJS(ast, input = '') { + const result = blank(); + const tables = new Set(); + const table_arrays = new Set(); + const defined = new Set(); + let active = result; + let previous_active; + let skip = false; + traverse(ast, { + [NodeType.Table](node) { + const key = node.key.item.value; + try { + validateKey(result, key, node.type, { tables, table_arrays, defined }); + } + catch (err) { + throw new ParseError(input, node.key.loc.start, err.message); + } + const joined_key = joinKey(key); + tables.add(joined_key); + defined.add(joined_key); + active = ensureTable(result, key); + }, + [NodeType.TableArray](node) { + const key = node.key.item.value; + try { + validateKey(result, key, node.type, { tables, table_arrays, defined }); + } + catch (err) { + throw new ParseError(input, node.key.loc.start, err.message); + } + const joined_key = joinKey(key); + table_arrays.add(joined_key); + defined.add(joined_key); + active = ensureTableArray(result, key); + }, + [NodeType.KeyValue]: { + enter(node) { + if (skip) + return; + const key = node.key.value; + try { + validateKey(active, key, node.type, { tables, table_arrays, defined }); + } + catch (err) { + throw new ParseError(input, node.key.loc.start, err.message); + } + const value = toValue(node.value); + const target = key.length > 1 ? ensureTable(active, key.slice(0, -1)) : active; + target[last(key)] = value; + defined.add(joinKey(key)); + if (isInlineTable(node.value)) { + previous_active = active; + active = value; + } + }, + exit(node) { + if (isInlineTable(node.value)) { + active = previous_active; + } + } + }, + [NodeType.InlineTable]: { + enter() { + // Handled by toValue + skip = true; + }, + exit() { + skip = false; + } + } + }); + return result; +} +function toValue(node) { + switch (node.type) { + case NodeType.InlineTable: + const result = blank(); + node.items.forEach(({ item }) => { + const key = item.key.value; + const value = toValue(item.value); + const target = key.length > 1 ? ensureTable(result, key.slice(0, -1)) : result; + target[last(key)] = value; + }); + return result; + case NodeType.InlineArray: + return node.items.map(item => toValue(item.item)); + case NodeType.String: + case NodeType.Integer: + case NodeType.Float: + case NodeType.Boolean: + case NodeType.DateTime: + return node.value; + default: + throw new Error(`Unrecognized value type "${node.type}"`); + } +} +function validateKey(object, key, type, state) { + // 1. Cannot override primitive value + let parts = []; + let index = 0; + for (const part of key) { + parts.push(part); + if (!has(object, part)) + return; + if (isPrimitive(object[part])) { + throw new Error(`Invalid key, a value has already been defined for ${parts.join('.')}`); + } + const joined_parts = joinKey(parts); + if (Array.isArray(object[part]) && !state.table_arrays.has(joined_parts)) { + throw new Error(`Invalid key, cannot add to a static array at ${joined_parts}`); + } + const next_is_last = index++ < key.length - 1; + object = Array.isArray(object[part]) && next_is_last ? last(object[part]) : object[part]; + } + const joined_key = joinKey(key); + // 2. 
Cannot override table + if (object && type === NodeType.Table && state.defined.has(joined_key)) { + throw new Error(`Invalid key, a table has already been defined named ${joined_key}`); + } + // 3. Cannot add table array to static array or table + if (object && type === NodeType.TableArray && !state.table_arrays.has(joined_key)) { + throw new Error(`Invalid key, cannot add an array of tables to a table at ${joined_key}`); + } +} +function ensureTable(object, key) { + const target = ensure(object, key.slice(0, -1)); + const last_key = last(key); + if (!target[last_key]) { + target[last_key] = blank(); + } + return target[last_key]; +} +function ensureTableArray(object, key) { + const target = ensure(object, key.slice(0, -1)); + const last_key = last(key); + if (!target[last_key]) { + target[last_key] = []; + } + const next = blank(); + target[last(key)].push(next); + return next; +} +function ensure(object, keys) { + return keys.reduce((active, subkey) => { + if (!active[subkey]) { + active[subkey] = blank(); + } + return Array.isArray(active[subkey]) ? last(active[subkey]) : active[subkey]; + }, object); +} +function isPrimitive(value) { + return typeof value !== 'object' && !isDate(value); +} +function joinKey(key) { + return key.join('.'); +} + +var ChangeType; +(function (ChangeType) { + ChangeType["Add"] = "Add"; + ChangeType["Edit"] = "Edit"; + ChangeType["Remove"] = "Remove"; + ChangeType["Move"] = "Move"; + ChangeType["Rename"] = "Rename"; +})(ChangeType || (ChangeType = {})); +function isAdd(change) { + return change.type === ChangeType.Add; +} +function isEdit(change) { + return change.type === ChangeType.Edit; +} +function isRemove(change) { + return change.type === ChangeType.Remove; +} +function isMove(change) { + return change.type === ChangeType.Move; +} +function isRename(change) { + return change.type === ChangeType.Rename; +} +function diff(before, after, path = []) { + if (before === after || datesEqual(before, after)) { + return []; + } + if (Array.isArray(before) && Array.isArray(after)) { + return compareArrays(before, after, path); + } + else if (isObject(before) && isObject(after)) { + return compareObjects(before, after, path); + } + else { + return [ + { + type: ChangeType.Edit, + path + } + ]; + } +} +function compareObjects(before, after, path = []) { + let changes = []; + // 1. Get keys and stable values + const before_keys = Object.keys(before); + const before_stable = before_keys.map(key => stableStringify(before[key])); + const after_keys = Object.keys(after); + const after_stable = after_keys.map(key => stableStringify(after[key])); + // Check for rename by seeing if object is in both before and after + // and that key is no longer used in after + const isRename = (stable, search) => { + const index = search.indexOf(stable); + if (index < 0) + return false; + const before_key = before_keys[before_stable.indexOf(stable)]; + return !after_keys.includes(before_key); + }; + // 2. Check for changes, rename, and removed + before_keys.forEach((key, index) => { + const sub_path = path.concat(key); + if (after_keys.includes(key)) { + merge(changes, diff(before[key], after[key], sub_path)); + } + else if (isRename(before_stable[index], after_stable)) { + const to = after_keys[after_stable.indexOf(before_stable[index])]; + changes.push({ + type: ChangeType.Rename, + path, + from: key, + to + }); + } + else { + changes.push({ + type: ChangeType.Remove, + path: sub_path + }); + } + }); + // 3. 
Check for additions + after_keys.forEach((key, index) => { + if (!before_keys.includes(key) && !isRename(after_stable[index], before_stable)) { + changes.push({ + type: ChangeType.Add, + path: path.concat(key) + }); + } + }); + return changes; +} +function compareArrays(before, after, path = []) { + let changes = []; + // 1. Convert arrays to stable objects + const before_stable = before.map(stableStringify); + const after_stable = after.map(stableStringify); + // 2. Step through after array making changes to before array as-needed + after_stable.forEach((value, index) => { + const overflow = index >= before_stable.length; + // Check if items are the same + if (!overflow && before_stable[index] === value) { + return; + } + // Check if item has been moved -> shift into place + const from = before_stable.indexOf(value, index + 1); + if (!overflow && from > -1) { + changes.push({ + type: ChangeType.Move, + path, + from, + to: index + }); + const move = before_stable.splice(from, 1); + before_stable.splice(index, 0, ...move); + return; + } + // Check if item is removed -> assume it's been edited and replace + const removed = !after_stable.includes(before_stable[index]); + if (!overflow && removed) { + merge(changes, diff(before[index], after[index], path.concat(index))); + before_stable[index] = value; + return; + } + // Add as new item and shift existing + changes.push({ + type: ChangeType.Add, + path: path.concat(index) + }); + before_stable.splice(index, 0, value); + }); + // 3. Remove any remaining overflow items + for (let i = after_stable.length; i < before_stable.length; i++) { + changes.push({ + type: ChangeType.Remove, + path: path.concat(i) + }); + } + return changes; +} + +function findByPath(node, path) { + if (!path.length) + return node; + if (isKeyValue(node)) { + return findByPath(node.value, path); + } + const indexes = {}; + let found; + if (hasItems(node)) { + node.items.some((item, index) => { + try { + let key = []; + if (isKeyValue(item)) { + key = item.key.value; + } + else if (isTable(item)) { + key = item.key.item.value; + } + else if (isTableArray(item)) { + key = item.key.item.value; + const key_string = stableStringify(key); + if (!indexes[key_string]) { + indexes[key_string] = 0; + } + const array_index = indexes[key_string]++; + key = key.concat(array_index); + } + else if (isInlineItem(item) && isKeyValue(item.item)) { + key = item.item.key.value; + } + else if (isInlineItem(item)) { + key = [index]; + } + if (key.length && arraysEqual(key, path.slice(0, key.length))) { + found = findByPath(item, path.slice(key.length)); + return true; + } + else { + return false; + } + } + catch (err) { + return false; + } + }); + } + if (!found) { + throw new Error(`Could not find node at path ${path.join('.')}`); + } + return found; +} +function tryFindByPath(node, path) { + try { + return findByPath(node, path); + } + catch (err) { } +} +function findParent(node, path) { + let parent_path = path; + let parent; + while (parent_path.length && !parent) { + parent_path = parent_path.slice(0, -1); + parent = tryFindByPath(node, parent_path); + } + if (!parent) { + throw new Error(`Count not find parent node for path ${path.join('.')}`); + } + return parent; +} + +function patch(existing, updated, format) { + const existing_ast = parseTOML(existing); + const items = [...existing_ast]; + const existing_js = toJS(items); + const existing_document = { + type: NodeType.Document, + loc: { start: { line: 1, column: 0 }, end: { line: 1, column: 0 } }, + items + }; + const updated_document 
= parseJS(updated, format); + const changes = diff(existing_js, updated); + const patched_document = applyChanges(existing_document, updated_document, changes); + return toTOML(patched_document.items); +} +function applyChanges(original, updated, changes) { + // Potential Changes: + // + // Add: Add key-value to object, add item to array + // Edit: Change in value + // Remove: Remove key-value from object, remove item from array + // Move: Move item in array + // Rename: Rename key in key-value + // + // Special consideration, inline comments need to move as-needed + changes.forEach(change => { + if (isAdd(change)) { + const child = findByPath(updated, change.path); + const parent_path = change.path.slice(0, -1); + let index = last(change.path); + let is_table_array = isTableArray(child); + if (isInteger(index) && !parent_path.some(isInteger)) { + const sibling = tryFindByPath(original, parent_path.concat(0)); + if (sibling && isTableArray(sibling)) { + is_table_array = true; + } + } + let parent; + if (isTable(child)) { + parent = original; + } + else if (is_table_array) { + parent = original; + // The index needs to be updated to top-level items + // to properly account for other items, comments, and nesting + const document = original; + const before = tryFindByPath(document, parent_path.concat(index - 1)); + const after = tryFindByPath(document, parent_path.concat(index)); + if (after) { + index = document.items.indexOf(after); + } + else if (before) { + index = document.items.indexOf(before) + 1; + } + else { + index = document.items.length; + } + } + else { + parent = findParent(original, change.path); + if (isKeyValue(parent)) + parent = parent.value; + } + if (isTableArray(parent) || isInlineArray(parent) || isDocument(parent)) { + insert(original, parent, child, index); + } + else { + insert(original, parent, child); + } + } + else if (isEdit(change)) { + let existing = findByPath(original, change.path); + let replacement = findByPath(updated, change.path); + let parent; + if (isKeyValue(existing) && isKeyValue(replacement)) { + // Edit for key-value means value changes + parent = existing; + existing = existing.value; + replacement = replacement.value; + } + else { + parent = findParent(original, change.path); + } + replace(original, parent, existing, replacement); + } + else if (isRemove(change)) { + let parent = findParent(original, change.path); + if (isKeyValue(parent)) + parent = parent.value; + const node = findByPath(original, change.path); + remove(original, parent, node); + } + else if (isMove(change)) { + let parent = findByPath(original, change.path); + if (hasItem(parent)) + parent = parent.item; + if (isKeyValue(parent)) + parent = parent.value; + const node = parent.items[change.from]; + remove(original, parent, node); + insert(original, parent, node, change.to); + } + else if (isRename(change)) { + let parent = findByPath(original, change.path.concat(change.from)); + let replacement = findByPath(updated, change.path.concat(change.to)); + if (hasItem(parent)) + parent = parent.item; + if (hasItem(replacement)) + replacement = replacement.item; + replace(original, parent, parent.key, replacement.key); + } + }); + applyWrites(original); + return original; +} + +function parse(value) { + return toJS(parseTOML(value), value); +} +function stringify(value, format) { + const document = parseJS(value, format); + return toTOML(document.items); +} + +export { parse, patch, stringify }; diff --git a/node_modules/toml-patch/dist/toml-patch.umd.min.js 
b/node_modules/toml-patch/dist/toml-patch.umd.min.js new file mode 100644 index 0000000..097648f --- /dev/null +++ b/node_modules/toml-patch/dist/toml-patch.umd.min.js @@ -0,0 +1,2 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e=e||self).TOML={})}(this,function(e){"use strict";var t,n;function l(e){return e.type===t.Document}function o(e){return e.type===t.Table}function r(e){return e.type===t.TableArray}function a(e){return e.type===t.KeyValue}function c(e){return e.type===t.InlineArray}function i(e){return e.type===t.InlineItem}function u(e){return e.type===t.InlineTable}function s(e){return e.type===t.Comment}function f(e){return l(e)||o(e)||r(e)||u(e)||c(e)}function m(e){return function(e){return e.type===t.TableKey}(e)||function(e){return e.type===t.TableArrayKey}(e)||i(e)}!function(e){e.Document="Document",e.Table="Table",e.TableKey="TableKey",e.TableArray="TableArray",e.TableArrayKey="TableArrayKey",e.KeyValue="KeyValue",e.Key="Key",e.String="String",e.Integer="Integer",e.Float="Float",e.Boolean="Boolean",e.DateTime="DateTime",e.InlineArray="InlineArray",e.InlineItem="InlineItem",e.InlineTable="InlineTable",e.Comment="Comment"}(t||(t={}));class d{constructor(e){this.iterator=e,this.index=-1,this.value=void 0,this.done=!1,this.peeked=null}next(){if(this.done)return y();const e=this.peeked||this.iterator.next();return this.index+=1,this.value=e.value,this.done=e.done,this.peeked=null,e}peek(){return this.done?y():this.peeked?this.peeked:(this.peeked=this.iterator.next(),this.peeked)}[Symbol.iterator](){return this}}function y(){return{value:void 0,done:!0}}function p(e){return{lines:e.end.line-e.start.line+1,columns:e.end.column-e.start.column}}function v(e,t){const n=Array.isArray(e)?e:w(e),l=n.findIndex(e=>e>=t)+1;return{line:l,column:t-(n[l-2]+1||0)}}function w(e){const t=/[\r\n|\n]/g,n=[];let l;for(;null!=(l=t.exec(e));)n.push(l.index);return n.push(e.length+1),n}function h(e){return{line:e.line,column:e.column}}function g(e){return{start:h(e.start),end:h(e.end)}}class b extends Error{constructor(e,t,n){let l=`Error parsing TOML (${t.line}, ${t.column+1}):\n`;if(e){const n=function(e,t){const n=w(e),l=n[t.line-2]||0,o=n[t.line-1]||e.length;return e.substr(l,o-l)}(e,t),o=`${function(e,t=" "){return t.repeat(e)}(t.column)}^`;n&&(l+=`${n}\n${o}\n`)}super(l+=n),this.line=t.line,this.column=t.column}}!function(e){e.Bracket="Bracket",e.Curly="Curly",e.Equal="Equal",e.Comma="Comma",e.Dot="Dot",e.Comment="Comment",e.Literal="Literal"}(n||(n={}));const k=/\s/,x=/(\r\n|\n)/,T='"',I="'",A=" ",E="\\",$=/[\w,\d,\",\',\+,\-,\_]/;function*S(e){const t=new d(e[Symbol.iterator]());t.next();const l=function(e){const t=w(e);return(e,n)=>({start:v(t,e),end:v(t,n)})}(e);for(;!t.done;){if(k.test(t.value));else if("["===t.value||"]"===t.value)yield K(t,l,n.Bracket);else if("{"===t.value||"}"===t.value)yield K(t,l,n.Curly);else if("="===t.value)yield K(t,l,n.Equal);else if(","===t.value)yield K(t,l,n.Comma);else if("."===t.value)yield K(t,l,n.Dot);else if("#"===t.value)yield C(t,l);else{const n=j(e,t.index,I)||j(e,t.index,T);n?yield O(t,l,n,e):yield D(t,l,e)}t.next()}}function K(e,t,n){return{type:n,raw:e.value,loc:t(e.index,e.index+1)}}function C(e,t){const l=e.index;let o=e.value;for(;!e.peek().done&&!x.test(e.peek().value);)e.next(),o+=e.value;return{type:n.Comment,raw:o,loc:t(l,e.index+1)}}function O(e,t,l,o){const r=e.index;let 
a=l+l+l,c=a;for(e.next(),e.next(),e.next();!e.done&&!j(o,e.index,l);)c+=e.value,e.next();if(e.done)throw new b(o,v(o,e.index),`Expected close of multiline string with ${a}, reached end of file`);return c+=a,e.next(),e.next(),{type:n.Literal,raw:c,loc:t(r,e.index+1)}}function D(e,t,l){if(!$.test(e.value))throw new b(l,v(l,e.index),`Unsupported character "${e.value}". Expected ALPHANUMERIC, ", ', +, -, or _`);const o=e.index;let r=e.value,a=e.value===T,c=e.value===I;const i=e=>{if(e.peek().done)return!0;const t=e.peek().value;return!(a||c)&&(k.test(t)||","===t||"."===t||"]"===t||"}"===t||"="===t)};for(;!e.done&&!i(e)&&(e.next(),e.value===T&&(a=!a),e.value!==I||a||(c=!c),r+=e.value,!e.peek().done);){let t=e.peek().value;a&&e.value===E&&(t===T?(r+=T,e.next()):t===E&&(r+=E,e.next()))}if(a||c)throw new b(l,v(l,o),`Expected close of string with ${a?T:I}`);return{type:n.Literal,raw:r,loc:t(o,e.index+1)}}function j(e,t,n){return e[t]===n&&e[t+1]===n&&e[t+2]===n&&n}function B(e){return e[e.length-1]}function _(){return Object.create(null)}function F(e){return"number"==typeof e&&e%1==0}function M(e){return"[object Date]"===Object.prototype.toString.call(e)}function N(e){return e&&"object"==typeof e&&!M(e)&&!Array.isArray(e)}function L(e,t){return Object.prototype.hasOwnProperty.call(e,t)}function U(e,...t){return t.reduce((e,t)=>t(e),e)}function V(e){if(N(e)){return`{${Object.keys(e).sort().map(t=>`${JSON.stringify(t)}:${V(e[t])}`).join(",")}}`}return Array.isArray(e)?`[${e.map(V).join(",")}]`:JSON.stringify(e)}function q(e,t){const n=e.length,l=t.length;e.length=n+l;for(let o=0;o{const t=parseInt(e.replace("\\U",""),16),n=String.fromCodePoint(t);return Z(JSON.stringify(n),1)});return JSON.parse(`"${t}"`)}function Z(e,t){return e.substr(t,e.length-2*t)}function ee(e){return G.test(e)?e.substr(1):e}function te(e){return e.replace(z,P).replace(H,J)}function ne(e){return e.replace(Q,"")}const le="true",oe="false",re=/e/i,ae=/\_/g,ce=/inf/,ie=/nan/,ue=/^0x/,se=/^0o/,fe=/^0b/,me=/(\d{4})-(\d{2})-(\d{2})/,de=/(\d{2}):(\d{2}):(\d{2})/;function*ye(e){const t=S(e),n=new d(t);for(;!n.next().done;)yield*pe(n,e)}function*pe(e,l){if(e.value.type===n.Comment)yield we(e);else if(e.value.type===n.Bracket)yield function(e,l){const o=e.peek().done||e.peek().value.type!==n.Bracket?t.Table:t.TableArray,r=o===t.Table;if(r&&"["!==e.value.raw)throw new b(l,e.value.loc.start,`Expected table opening "[", found ${e.value.raw}`);if(!r&&("["!==e.value.raw||"["!==e.peek().value.raw))throw new b(l,e.value.loc.start,`Expected array of tables opening "[[", found ${e.value.raw+e.peek().value.raw}`);const a=r?{type:t.TableKey,loc:e.value.loc}:{type:t.TableArrayKey,loc:e.value.loc};e.next(),o===t.TableArray&&e.next();if(e.done)throw new b(l,a.loc.start,"Expected table key, reached end of file");a.item={type:t.Key,loc:g(e.value.loc),raw:e.value.raw,value:[X(e.value.raw)]};for(;!e.peek().done&&e.peek().value.type===n.Dot;){e.next();const t=e.value;e.next();const n=" ".repeat(t.loc.start.column-a.item.loc.end.column),l=" ".repeat(e.value.loc.start.column-t.loc.end.column);a.item.loc.end=e.value.loc.end,a.item.raw+=`${n}.${l}${e.value.raw}`,a.item.value.push(X(e.value.raw))}if(e.next(),r&&(e.done||"]"!==e.value.raw))throw new b(l,e.done?a.item.loc.end:e.value.loc.start,`Expected table closing "]", found ${e.done?"end of file":e.value.raw}`);if(!r&&(e.done||e.peek().done||"]"!==e.value.raw||"]"!==e.peek().value.raw))throw new b(l,e.done||e.peek().done?a.item.loc.end:e.value.loc.start,`Expected array of tables closing "]]", found 
${e.done||e.peek().done?"end of file":e.value.raw+e.peek().value.raw}`);r||e.next();a.loc.end=e.value.loc.end;let c=[];for(;!e.peek().done&&e.peek().value.type!==n.Bracket;)e.next(),q(c,[...pe(e,l)]);return{type:r?t.Table:t.TableArray,loc:{start:h(a.loc.start),end:h(c.length?c[c.length-1].loc.end:a.loc.end)},key:a,items:c}}(e,l);else{if(e.value.type!==n.Literal)throw new b(l,e.value.loc.start,`Unexpected token "${e.value.type}". Expected Comment, Bracket, or String`);yield*function(e,l){const o={type:t.Key,loc:g(e.value.loc),raw:e.value.raw,value:[X(e.value.raw)]};for(;!e.peek().done&&e.peek().value.type===n.Dot;)e.next(),e.next(),o.loc.end=e.value.loc.end,o.raw+=`.${e.value.raw}`,o.value.push(X(e.value.raw));if(e.next(),e.done||e.value.type!==n.Equal)throw new b(l,e.done?o.loc.end:e.value.loc.start,`Expected "=" for key-value, found ${e.done?"end of file":e.value.raw}`);const r=e.value.loc.start.column;if(e.next(),e.done)throw new b(l,o.loc.start,"Expected value for key-value, reached end of file");const[a,...c]=ve(e,l);return[{type:t.KeyValue,key:o,value:a,loc:{start:h(o.loc.start),end:h(a.loc.end)},equals:r},...c]}(e,l)}}function*ve(e,l){if(e.value.type===n.Literal)e.value.raw[0]===T||e.value.raw[0]===I?yield function(e){return{type:t.String,loc:e.value.loc,raw:e.value.raw,value:X(e.value.raw)}}(e):e.value.raw===le||e.value.raw===oe?yield function(e){return{type:t.Boolean,loc:e.value.loc,value:e.value.raw===le}}(e):me.test(e.value.raw)||de.test(e.value.raw)?yield function(e,l){let o,r=e.value.loc,a=e.value.raw;if(!e.peek().done&&e.peek().value.type===n.Literal&&me.test(a)&&de.test(e.peek().value.raw)){const t=r.start;e.next(),r={start:t,end:e.value.loc.end},a+=` ${e.value.raw}`}if(!e.peek().done&&e.peek().value.type===n.Dot){const t=r.start;if(e.next(),e.peek().done||e.peek().value.type!==n.Literal)throw new b(l,e.value.loc.end,"Expected fractional value for DateTime");e.next(),r={start:t,end:e.value.loc.end},a+=`.${e.value.raw}`}if(me.test(a))o=new Date(a.replace(" ","T"));else{const[e]=(new Date).toISOString().split("T");o=new Date(`${e}T${a}`)}return{type:t.DateTime,loc:r,raw:a,value:o}}(e,l):!e.peek().done&&e.peek().value.type===n.Dot||ce.test(e.value.raw)||ie.test(e.value.raw)||re.test(e.value.raw)&&!ue.test(e.value.raw)?yield function(e,l){let o,r=e.value.loc,a=e.value.raw;if(ce.test(a))o="-inf"===a?-1/0:1/0;else if(ie.test(a))o=NaN;else if(e.peek().done||e.peek().value.type!==n.Dot)o=Number(a.replace(ae,""));else{const t=r.start;if(e.next(),e.peek().done||e.peek().value.type!==n.Literal)throw new b(l,e.value.loc.end,"Expected fraction value for Float");e.next(),a+=`.${e.value.raw}`,r={start:t,end:e.value.loc.end},o=Number(a.replace(ae,""))}return{type:t.Float,loc:r,raw:a,value:o}}(e,l):yield function(e){if("-0"===e.value.raw||"+0"===e.value.raw)return{type:t.Integer,loc:e.value.loc,raw:e.value.raw,value:0};let n=10;ue.test(e.value.raw)?n=16:se.test(e.value.raw)?n=8:fe.test(e.value.raw)&&(n=2);const l=parseInt(e.value.raw.replace(ae,"").replace(se,"").replace(fe,""),n);return{type:t.Integer,loc:e.value.loc,raw:e.value.raw,value:l}}(e);else if(e.value.type===n.Curly)yield function(e,l){if("{"!==e.value.raw)throw new b(l,e.value.loc.start,`Expected "{" for inline table, found ${e.value.raw}`);const o={type:t.InlineTable,loc:g(e.value.loc),items:[]};e.next();for(;!e.done&&(e.value.type!==n.Curly||"}"!==e.value.raw);){if(e.value.type===n.Comma){const t=o.items[o.items.length-1];if(!t)throw new b(l,e.value.loc.start,'Found "," without previous value in inline 
table');t.comma=!0,t.loc.end=e.value.loc.start,e.next();continue}const[r]=pe(e,l);if(r.type!==t.KeyValue)throw new b(l,e.value.loc.start,`Only key-values are supported in inline tables, found ${r.type}`);const a={type:t.InlineItem,loc:g(r.loc),item:r,comma:!1};o.items.push(a),e.next()}if(e.done||e.value.type!==n.Curly||"}"!==e.value.raw)throw new b(l,e.done?o.loc.start:e.value.loc.start,`Expected "}", found ${e.done?"end of file":e.value.raw}`);return o.loc.end=e.value.loc.end,o}(e,l);else{if(e.value.type!==n.Bracket)throw new b(l,e.value.loc.start,`Unrecognized token type "${e.value.type}". Expected String, Curly, or Bracket`);{const[o,r]=function(e,l){if("["!==e.value.raw)throw new b(l,e.value.loc.start,`Expected "[" for inline array, found ${e.value.raw}`);const o={type:t.InlineArray,loc:g(e.value.loc),items:[]};let r=[];e.next();for(;!e.done&&(e.value.type!==n.Bracket||"]"!==e.value.raw);){if(e.value.type===n.Comma){const t=o.items[o.items.length-1];if(!t)throw new b(l,e.value.loc.start,'Found "," without previous value for inline array');t.comma=!0,t.loc.end=e.value.loc.start}else if(e.value.type===n.Comment)r.push(we(e));else{const[n,...a]=ve(e,l),c={type:t.InlineItem,loc:g(n.loc),item:n,comma:!1};o.items.push(c),q(r,a)}e.next()}if(e.done||e.value.type!==n.Bracket||"]"!==e.value.raw)throw new b(l,e.done?o.loc.start:e.value.loc.start,`Expected "]", found ${e.done?"end of file":e.value.raw}`);return o.loc.end=e.value.loc.end,[o,r]}(e,l);yield o,yield*r}}}function we(e){return{type:t.Comment,loc:e.value.loc,raw:e.value.raw}}function he(e,n){var l;function o(e,t){for(const n of e)r(n,t)}function r(e,l){const a=n[e.type];switch(a&&"function"==typeof a&&a(e,l),a&&a.enter&&a.enter(e,l),e.type){case t.Document:o(e.items,e);break;case t.Table:r(e.key,e),o(e.items,e);break;case t.TableKey:r(e.item,e);break;case t.TableArray:r(e.key,e),o(e.items,e);break;case t.TableArrayKey:r(e.item,e);break;case t.KeyValue:r(e.key,e),r(e.value,e);break;case t.InlineArray:o(e.items,e);break;case t.InlineItem:r(e.item,e);break;case t.InlineTable:o(e.items,e);break;case t.Key:case t.String:case t.Integer:case t.Float:case t.Boolean:case t.DateTime:case t.Comment:break;default:throw new Error(`Unrecognized node type "${e.type}"`)}a&&a.exit&&a.exit(e,l)}null!=(l=e)&&"function"==typeof l[Symbol.iterator]?o(e,null):r(e,null)}const ge=new WeakMap,be=e=>(ge.has(e)||ge.set(e,new WeakMap),ge.get(e)),ke=new WeakMap,xe=e=>(ke.has(e)||ke.set(e,new WeakMap),ke.get(e));function Te(e,t,n,l){if(f(t)){const e=t.items.indexOf(n);if(e<0)throw new Error("Could not find existing item in parent node for replace");t.items.splice(e,1,l)}else if(m(t))t.item=l;else{if(!a(t))throw new Error(`Unsupported parent type "${t.type}" for replace`);t.key===n?t.key=l:t.value=l}Ke(l,{lines:n.loc.start.line-l.loc.start.line,columns:n.loc.start.column-l.loc.start.column});const o=p(n.loc),r=p(l.loc);Ce({lines:r.lines-o.lines,columns:r.columns-o.columns},xe(e),l,n)}function Ie(e,t,n,m){if(!f(t))throw new Error(`Unsupported parent type "${t.type}" for insert`);let d,y;m=null!=m?m:t.items.length,c(t)||u(t)?({shift:d,offset:y}=function(e,t,n){if(!i(t))throw new Error(`Incompatible child type "${t.type}"`);const l=null!=n?e.items[n-1]:B(e.items),o=null==n||n===e.items.length;e.items.splice(n,0,t);const r=!!l,a=!o,u=o&&!0===t.comma;r&&(l.comma=!0);a&&(t.comma=!0);const f=c(e)&&function(e){if(!e.items.length)return!1;return 
p(e.loc).lines>e.items.length}(e),m=l?{line:l.loc.end.line,column:f?s(l)?e.loc.start.column:l.loc.start.column:l.loc.end.column}:h(e.loc.start);let d=0;if(f)d=1;else{const e=2,t=1;m.column+=r?e:t}m.line+=d;const y={lines:m.line-t.loc.start.line,columns:m.column-t.loc.start.column},v=p(t.loc),w={lines:v.lines+(d-1),columns:v.columns+(r||a?2:0)+(u?1:0)};return{shift:y,offset:w}}(t,n,m)):({shift:d,offset:y}=function(e,t,n){if(c=t,!(a(c)||o(c)||r(c)||s(c)))throw new Error(`Incompatible child type "${t.type}"`);var c;const i=e.items[n-1],u=l(e)&&!e.items.length;e.items.splice(n,0,t);const f=i?{line:i.loc.end.line,column:s(i)?e.loc.start.column:i.loc.start.column}:h(e.loc.start),m=o(t)||r(t);let d=0;u||(d=m?2:1);f.line+=d;const y={lines:f.line-t.loc.start.line,columns:f.column-t.loc.start.column},v=p(t.loc),w={lines:v.lines+(d-1),columns:v.columns};return{shift:y,offset:w}}(t,n,m)),Ke(n,d);const v=t.items[m-1],w=v&&xe(e).get(v);w&&(y.lines+=w.lines,y.columns+=w.columns,i(n)&&v&&t.items[m+1]&&(y.columns-=2),xe(e).delete(v)),xe(e).set(n,y)}function Ae(e,t,n){if(!f(t))throw new Error(`Unsupported parent type "${t.type}" for remove`);let l=t.items.indexOf(n);if(l<0){if((l=t.items.findIndex(e=>m(e)&&e.item===n))<0)throw new Error("Could not find node in parent for removal");n=t.items[l]}const o=t.items[l-1];let r=t.items[l+1];t.items.splice(l,1);let a=p(n.loc);r&&s(r)&&r.loc.start.line===n.loc.end.line&&(a=p({start:n.loc.start,end:r.loc.end}),r=t.items[l+1],t.items.splice(l,1));const c=o&&i(o),u=o&&o.loc.end.line===n.loc.start.line,d=r&&r.loc.start.line===n.loc.end.line,y=c&&(u||d),v={lines:-(a.lines-(y?1:0)),columns:-a.columns};c&&u&&(v.columns-=2),c&&o&&!r&&(o.comma=!1);const w=o||t,h=o?xe(e):be(e),g=xe(e),b=h.get(w);b&&(v.lines+=b.lines,v.columns+=b.columns);const k=g.get(n);k&&(v.lines+=k.lines,v.columns+=k.columns),h.set(w,v)}function Ee(e,t,n=!0){if(!n)return;if(!t.items.length)return;Ce({lines:0,columns:1},be(e),t);const l=B(t.items);Ce({lines:0,columns:1},xe(e),l)}function $e(e,t,n=!1){if(!n)return;if(!t.items.length)return;const l=B(t.items);l.comma=!0,Ce({lines:0,columns:1},xe(e),l)}function Se(e){const n=be(e),l=xe(e),o={lines:0,columns:{}};function r(e){e.loc.start.line+=o.lines,e.loc.start.column+=o.columns[e.loc.start.line]||0;const t=n.get(e);t&&(o.lines+=t.lines,o.columns[e.loc.start.line]=(o.columns[e.loc.start.line]||0)+t.columns)}function a(e){e.loc.end.line+=o.lines,e.loc.end.column+=o.columns[e.loc.end.line]||0;const t=l.get(e);t&&(o.lines+=t.lines,o.columns[e.loc.end.line]=(o.columns[e.loc.end.line]||0)+t.columns)}const c={enter:r,exit:a};he(e,{[t.Document]:c,[t.Table]:c,[t.TableArray]:c,[t.InlineTable]:c,[t.InlineArray]:c,[t.InlineItem]:c,[t.TableKey]:c,[t.TableArrayKey]:c,[t.KeyValue]:{enter(e){const t=e.loc.start.line+o.lines,n=l.get(e.key);e.equals+=(o.columns[t]||0)+(n?n.columns:0),r(e)},exit:a},[t.Key]:c,[t.String]:c,[t.Integer]:c,[t.Float]:c,[t.Boolean]:c,[t.DateTime]:c,[t.Comment]:c}),ge.delete(e),ke.delete(e)}function Ke(e,n,l={}){const{first_line_only:o=!1}=l,r=e.loc.start.line,{lines:a,columns:c}=n,i=e=>{o&&e.loc.start.line!==r||(e.loc.start.column+=c,e.loc.end.column+=c),e.loc.start.line+=a,e.loc.end.line+=a};return he(e,{[t.Table]:i,[t.TableKey]:i,[t.TableArray]:i,[t.TableArrayKey]:i,[t.KeyValue](e){i(e),e.equals+=c},[t.Key]:i,[t.String]:i,[t.Integer]:i,[t.Float]:i,[t.Boolean]:i,[t.DateTime]:i,[t.InlineArray]:i,[t.InlineItem]:i,[t.InlineTable]:i,[t.Comment]:i}),e}function Ce(e,t,n,l){const o=t.get(l||n);o&&(e.lines+=o.lines,e.columns+=o.columns),t.set(n,e)}function 
Oe(){return{type:t.Document,loc:{start:{line:1,column:0},end:{line:1,column:0}},items:[]}}function De(e){const n=function(e){const n=Fe(e);return{type:t.TableKey,loc:{start:{line:1,column:0},end:{line:1,column:n.length+2}},item:{type:t.Key,loc:{start:{line:1,column:1},end:{line:1,column:n.length+1}},value:e,raw:n}}}(e);return{type:t.Table,loc:g(n.loc),key:n,items:[]}}function je(e){const n=function(e){const n=Fe(e);return{type:t.TableArrayKey,loc:{start:{line:1,column:0},end:{line:1,column:n.length+4}},item:{type:t.Key,loc:{start:{line:1,column:2},end:{line:1,column:n.length+2}},value:e,raw:n}}}(e);return{type:t.TableArray,loc:g(n.loc),key:n,items:[]}}function Be(e,n){const l=function(e){const n=Fe(e);return{type:t.Key,loc:{start:{line:1,column:0},end:{line:1,column:n.length}},raw:n,value:e}}(e),{column:o}=l.loc.end,r=o+1;return Ke(n,{lines:0,columns:o+3-n.loc.start.column},{first_line_only:!0}),{type:t.KeyValue,loc:{start:h(l.loc.start),end:h(n.loc.end)},key:l,equals:r,value:n}}const _e=/[\w,\d,\_,\-]+/;function Fe(e){return e.map(e=>_e.test(e)?e:JSON.stringify(e)).join(".")}function Me(e){return{type:t.InlineItem,loc:g(e.loc),item:e,comma:!1}}function Ne(e){return e.items.filter(e=>{if(!a(e))return!1;const t=u(e.value),n=c(e.value)&&e.value.items.length&&u(e.value.items[0].item);return t||n}).forEach(t=>{Ae(e,e,t),u(t.value)?Ie(e,e,function(e){const t=De(e.key.value);for(const n of e.value.items)Ie(t,t,n.item);return Se(t),t}(t)):function(e){const t=Oe();for(const n of e.value.items){const l=je(e.key.value);Ie(t,t,l);for(const e of n.item.items)Ie(t,l,e.item)}return Se(t),t.items}(t).forEach(t=>{Ie(e,e,t)})}),Se(e),e}function Le(e){let t=0,n=0;for(const l of e.items)0===n&&l.loc.start.line>1?t=1-l.loc.start.line:l.loc.start.line+t>n+2&&(t+=n+2-(l.loc.start.line+t)),Ke(l,{lines:t,columns:0}),n=l.loc.end.line;return e}const Ue={printWidth:80,trailingComma:!1,bracketSpacing:!0};function Ve(e,t={}){t=Object.assign({},Ue,t),e=We(e);const n=Oe();for(const l of qe(e,t))Ie(n,n,l);return Se(n),U(n,Ne,e=>(function(e,t){return e})(e),Le)}function*qe(e,t){for(const n of Object.keys(e))yield Be([n],Re(e[n],t))}function Re(e,n){if(null==e)throw new Error('"null" and "undefined" values are not supported');return function(e){return"string"==typeof e}(e)?function(e){const n=JSON.stringify(e);return{type:t.String,loc:{start:{line:1,column:0},end:{line:1,column:n.length}},raw:n,value:e}}(e):F(e)?function(e){const n=e.toString();return{type:t.Integer,loc:{start:{line:1,column:0},end:{line:1,column:n.length}},raw:n,value:e}}(e):function(e){return"number"==typeof e&&!F(e)}(e)?function(e){const n=e.toString();return{type:t.Float,loc:{start:{line:1,column:0},end:{line:1,column:n.length}},raw:n,value:e}}(e):function(e){return"boolean"==typeof e}(e)?function(e){return{type:t.Boolean,loc:{start:{line:1,column:0},end:{line:1,column:e?4:5}},value:e}}(e):M(e)?function(e){const n=e.toISOString();return{type:t.DateTime,loc:{start:{line:1,column:0},end:{line:1,column:n.length}},raw:n,value:e}}(e):Array.isArray(e)?function(e,n){const l={type:t.InlineArray,loc:{start:{line:1,column:0},end:{line:1,column:2}},items:[]};for(const t of e){const e=Re(t,n),o=Me(e);Ie(l,l,o)}return Ee(l,l,n.bracketSpacing),$e(l,l,n.trailingComma),Se(l),l}(e,n):function(e,n){if(!N(e=We(e)))return Re(e,n);const l={type:t.InlineTable,loc:{start:{line:1,column:0},end:{line:1,column:2}},items:[]},o=[...qe(e,n)];for(const e of o){const t=Me(e);Ie(l,l,t)}return Ee(l,l,n.bracketSpacing),$e(l,l,n.trailingComma),Se(l),l}(e,n)}function We(e){return 
e&&!M(e)&&"function"==typeof e.toJSON?e.toJSON():e}const Je=/(\r\n|\n)/g;function Pe(e,n="\n"){const l=[];return he(e,{[t.TableKey](e){const{start:t,end:n}=e.loc;ze(l,{start:t,end:{line:t.line,column:t.column+1}},"["),ze(l,{start:{line:n.line,column:n.column-1},end:n},"]")},[t.TableArrayKey](e){const{start:t,end:n}=e.loc;ze(l,{start:t,end:{line:t.line,column:t.column+2}},"[["),ze(l,{start:{line:n.line,column:n.column-2},end:n},"]]")},[t.KeyValue](e){const{start:{line:t}}=e.loc;ze(l,{start:{line:t,column:e.equals},end:{line:t,column:e.equals+1}},"=")},[t.Key](e){ze(l,e.loc,e.raw)},[t.String](e){ze(l,e.loc,e.raw)},[t.Integer](e){ze(l,e.loc,e.raw)},[t.Float](e){ze(l,e.loc,e.raw)},[t.Boolean](e){ze(l,e.loc,e.value.toString())},[t.DateTime](e){ze(l,e.loc,e.raw)},[t.InlineArray](e){const{start:t,end:n}=e.loc;ze(l,{start:t,end:{line:t.line,column:t.column+1}},"["),ze(l,{start:{line:n.line,column:n.column-1},end:n},"]")},[t.InlineTable](e){const{start:t,end:n}=e.loc;ze(l,{start:t,end:{line:t.line,column:t.column+1}},"{"),ze(l,{start:{line:n.line,column:n.column-1},end:n},"}")},[t.InlineItem](e){if(!e.comma)return;const t=e.loc.end;ze(l,{start:t,end:{line:t.line,column:t.column+1}},",")},[t.Comment](e){ze(l,e.loc,e.raw)}}),l.join(n)+n}function ze(e,t,n){const l=n.split(Je),o=t.end.line-t.start.line+1;if(l.length!==o)throw new Error(`Mismatch between location and raw string, expected ${o} lines for "${n}"`);for(let n=t.start.line;n<=t.end.line;n++){const o=He(e,n),r=n===t.start.line,a=n===t.end.line,c=r?o.substr(0,t.start.column).padEnd(t.start.column,A):"",i=a?o.substr(t.end.column):"";e[n-1]=c+l[n-t.start.line]+i}}function He(e,t){if(!e[t-1])for(let n=0;n{const n=t.key.value,o=e(t.value),r=n.length>1?Xe(l,n.slice(0,-1)):l;r[B(n)]=o}),l;case t.InlineArray:return n.items.map(t=>e(t.item));case t.String:case t.Integer:case t.Float:case t.Boolean:case t.DateTime:return n.value;default:throw new Error(`Unrecognized value type "${n.type}"`)}}(e.value);(l.length>1?Xe(i,l.slice(0,-1)):i)[B(l)]=f,a.add(Ze(l)),u(e.value)&&(c=i,i=f)},exit(e){u(e.value)&&(i=c)}},[t.InlineTable]:{enter(){s=!0},exit(){s=!1}}}),l}function Qe(e,n,l,o){let r=[],a=0;for(const t of n){if(r.push(t),!L(e,t))return;if("object"!=typeof(c=e[t])&&!M(c))throw new Error(`Invalid key, a value has already been defined for ${r.join(".")}`);const l=Ze(r);if(Array.isArray(e[t])&&!o.table_arrays.has(l))throw new Error(`Invalid key, cannot add to a static array at ${l}`);const i=a++(e[t]||(e[t]=_()),Array.isArray(e[t])?B(e[t]):e[t]),e)}function Ze(e){return e.join(".")}var et;function tt(e,t,n=[]){return e===t||(o=t,M(l=e)&&M(o)&&l.toISOString()===o.toISOString())?[]:Array.isArray(e)&&Array.isArray(t)?function(e,t,n=[]){let l=[];const o=e.map(V),r=t.map(V);r.forEach((a,c)=>{const i=c>=o.length;if(!i&&o[c]===a)return;const u=o.indexOf(a,c+1);if(!i&&u>-1){l.push({type:et.Move,path:n,from:u,to:c});const e=o.splice(u,1);return void o.splice(c,0,...e)}const s=!r.includes(o[c]);if(!i&&s)return q(l,tt(e[c],t[c],n.concat(c))),void(o[c]=a);l.push({type:et.Add,path:n.concat(c)}),o.splice(c,0,a)});for(let e=r.length;eV(e[t])),a=Object.keys(t),c=a.map(e=>V(t[e])),i=(e,t)=>{const n=t.indexOf(e);if(n<0)return!1;const l=o[r.indexOf(e)];return!a.includes(l)};return o.forEach((o,u)=>{const s=n.concat(o);if(a.includes(o))q(l,tt(e[o],t[o],s));else if(i(r[u],c)){const e=a[c.indexOf(r[u])];l.push({type:et.Rename,path:n,from:o,to:e})}else 
l.push({type:et.Remove,path:s})}),a.forEach((e,t)=>{o.includes(e)||i(c[t],r)||l.push({type:et.Add,path:n.concat(e)})}),l}(e,t,n):[{type:et.Edit,path:n}];var l,o}function nt(e,t){if(!t.length)return e;if(a(e))return nt(e.value,t);const n={};let l;if(f(e)&&e.items.some((e,c)=>{try{let u=[];if(a(e))u=e.key.value;else if(o(e))u=e.key.item.value;else if(r(e)){const t=V(u=e.key.item.value);n[t]||(n[t]=0);const l=n[t]++;u=u.concat(l)}else i(e)&&a(e.item)?u=e.item.key.value:i(e)&&(u=[c]);return!(!u.length||!function(e,t){if(e.length!==t.length)return!1;for(let n=0;n{if(function(e){return e.type===et.Add}(n)){const i=nt(t,n.path),u=n.path.slice(0,-1);let s,f=B(n.path),m=r(i);if(F(f)&&!u.some(F)){const t=lt(e,u.concat(0));t&&r(t)&&(m=!0)}if(o(i))s=e;else if(m){s=e;const t=e,n=lt(t,u.concat(f-1)),l=lt(t,u.concat(f));f=l?t.items.indexOf(l):n?t.items.indexOf(n)+1:t.items.length}else a(s=ot(e,n.path))&&(s=s.value);r(s)||c(s)||l(s)?Ie(e,s,i,f):Ie(e,s,i)}else if(function(e){return e.type===et.Edit}(n)){let l,o=nt(e,n.path),r=nt(t,n.path);a(o)&&a(r)?(l=o,o=o.value,r=r.value):l=ot(e,n.path),Te(e,l,o,r)}else if(function(e){return e.type===et.Remove}(n)){let t=ot(e,n.path);a(t)&&(t=t.value);const l=nt(e,n.path);Ae(e,t,l)}else if(function(e){return e.type===et.Move}(n)){let t=nt(e,n.path);m(t)&&(t=t.item),a(t)&&(t=t.value);const l=t.items[n.from];Ae(e,t,l),Ie(e,t,l,n.to)}else if(function(e){return e.type===et.Rename}(n)){let l=nt(e,n.path.concat(n.from)),o=nt(t,n.path.concat(n.to));m(l)&&(l=l.item),m(o)&&(o=o.item),Te(e,l,l.key,o.key)}}),Se(e),e}({type:t.Document,loc:{start:{line:1,column:0},end:{line:1,column:0}},items:u},Ve(n,i),tt(s,n)).items)},e.stringify=function(e,t){return Pe(Ve(e,t).items)},Object.defineProperty(e,"__esModule",{value:!0})}); +//# sourceMappingURL=toml-patch.umd.min.js.map diff --git a/node_modules/toml-patch/dist/toml-patch.umd.min.js.map b/node_modules/toml-patch/dist/toml-patch.umd.min.js.map new file mode 100644 index 0000000..908eb06 --- /dev/null +++ b/node_modules/toml-patch/dist/toml-patch.umd.min.js.map @@ -0,0 +1 @@ +{"version":3,"file":"toml-patch.umd.min.js","sources":["../src/ast.ts","../src/tokenizer.ts","../src/cursor.ts","../src/location.ts","../src/parse-error.ts","../src/utils.ts","../src/parse-string.ts","../src/parse-toml.ts","../src/traverse.ts","../src/writer.ts","../src/generate.ts","../src/format.ts","../src/parse-js.ts","../src/to-toml.ts","../src/to-js.ts","../src/diff.ts","../src/find-by-path.ts","../src/index.ts","../src/patch.ts"],"sourcesContent":["import { Location } from './location';\n\nexport enum NodeType {\n Document = 'Document',\n Table = 'Table',\n TableKey = 'TableKey',\n TableArray = 'TableArray',\n TableArrayKey = 'TableArrayKey',\n KeyValue = 'KeyValue',\n Key = 'Key',\n String = 'String',\n Integer = 'Integer',\n Float = 'Float',\n Boolean = 'Boolean',\n DateTime = 'DateTime',\n InlineArray = 'InlineArray',\n InlineItem = 'InlineItem',\n InlineTable = 'InlineTable',\n Comment = 'Comment'\n}\n\nexport type AST = Iterable;\n\n//\n// Document\n//\n// Top-level document that stores AST nodes\n//\nexport interface Document extends Node {\n type: NodeType.Document;\n items: Array;\n}\nexport function isDocument(node: Node): node is Document {\n return node.type === NodeType.Document;\n}\n\n//\n// Table\n//\n// Top-level object\n//\n// v-------|\n// [table] |\n// b = \"c\" |\n// |\n// # note |\n// ^--|\n// [b]\n//\nexport interface Table extends Node {\n type: NodeType.Table;\n key: TableKey;\n items: Array;\n}\nexport function isTable(node: Node): 
node is Table {\n return node.type === NodeType.Table;\n}\n\n//\n// TableKey\n//\n// Used to store bracket information for Table keys\n//\n// loc includes brackets\n//\n// [ key ]\n// ^-------^\n//\nexport interface TableKey extends Node {\n type: NodeType.TableKey;\n item: Key;\n}\nexport function isTableKey(node: Node): node is TableKey {\n return node.type === NodeType.TableKey;\n}\n\n//\n// TableArray\n//\n// Top-level array item\n//\n// v---------|\n// [[array]] |\n// a=\"b\" |\n// |\n// # details |\n// ^-|\n// [[array]]\n//\nexport interface TableArray extends Node {\n type: NodeType.TableArray;\n key: TableArrayKey;\n items: Array;\n}\nexport function isTableArray(node: Node): node is TableArray {\n return node.type === NodeType.TableArray;\n}\n\n//\n// TableArrayKey\n//\n// Used to store bracket information for TableArray keys\n// loc includes brackets\n//\n// [[ key ]]\n// ^---------^\n//\nexport interface TableArrayKey extends Node {\n type: NodeType.TableArrayKey;\n item: Key;\n}\nexport function isTableArrayKey(node: Node): node is TableArrayKey {\n return node.type === NodeType.TableArrayKey;\n}\n\n//\n// KeyValue\n//\n// Key and Value nodes, with position information on equals sign\n//\n// key=\"value\" # note\n// ^---------^\n//\nexport interface KeyValue extends Node {\n type: NodeType.KeyValue;\n key: Key;\n value: Value;\n\n // Column index (0-based) of equals sign\n equals: number;\n}\nexport function isKeyValue(node: Node): node is KeyValue {\n return node.type === NodeType.KeyValue;\n}\n\n//\n// Key\n//\n// Store raw key and parts (from dots)\n//\nexport interface Key extends Node {\n type: NodeType.Key;\n raw: string;\n\n // Note: Array for keys with dots\n // e.g. a.b -> raw = 'a.b', value = ['a', 'b']\n value: string[];\n}\nexport function isKey(node: Node): node is Key {\n return node.type === NodeType.Key;\n}\n\n//\n// String\n//\n// loc includes quotes\n//\n// a = \"string\"\n// ^------^\n//\nexport interface String extends Node {\n type: NodeType.String;\n raw: string;\n value: string;\n}\nexport function isString(node: Node): node is String {\n return node.type === NodeType.String;\n}\n\n//\n// Integer\n//\nexport interface Integer extends Node {\n type: NodeType.Integer;\n raw: string;\n value: number;\n}\nexport function isInteger(node: Node): node is Integer {\n return node.type === NodeType.Integer;\n}\n\n//\n// Float\n//\nexport interface Float extends Node {\n type: NodeType.Float;\n raw: string;\n value: number;\n}\nexport function isFloat(node: Node): node is Float {\n return node.type === NodeType.Float;\n}\n\n//\n// Boolean\n//\nexport interface Boolean extends Node {\n type: NodeType.Boolean;\n\n // Only `true` and `false` are permitted\n // -> don't need separate raw and value\n value: boolean;\n}\nexport function isBoolean(node: Node): node is Boolean {\n return node.type === NodeType.Boolean;\n}\n\n//\n// DateTime\n//\n// Note: Currently, Offset Date-Time, Local Date-Time, Local Date, and Local Time\n// are handled via raw\n//\nexport interface DateTime extends Node {\n type: NodeType.DateTime;\n raw: string;\n value: Date;\n}\nexport function isDateTime(node: Node): node is DateTime {\n return node.type === NodeType.DateTime;\n}\n\n//\n// InlineArray\n//\nexport interface InlineArray extends Node {\n type: NodeType.InlineArray;\n items: InlineArrayItem[];\n}\nexport function isInlineArray(node: Node): node is InlineArray {\n return node.type === NodeType.InlineArray;\n}\n\n//\n// InlineArrayItem\n//\n// loc for InlineArrayItem is from start of 
value to before comma\n// or end-of-value if no comma\n//\n// [ \"a\" ,\"b\", \"c\" ]\n// ^---^ ^-^ ^-^\n//\nexport interface InlineItem extends Node {\n type: NodeType.InlineItem;\n item: TItem;\n comma: boolean;\n}\nexport function isInlineItem(node: Node): node is InlineItem {\n return node.type === NodeType.InlineItem;\n}\n\nexport interface InlineArrayItem extends InlineItem {}\n\n//\n// InlineTable\n//\nexport interface InlineTable extends Node {\n type: NodeType.InlineTable;\n items: InlineTableItem[];\n}\nexport function isInlineTable(node: Node): node is InlineTable {\n return node.type === NodeType.InlineTable;\n}\n\n//\n// InlineTableItem\n//\n// loc for InlineTableItem follows InlineArrayItem\n//\n// { a=\"b\" , c = \"d\" }\n// ^------^ ^--------^\n//\nexport interface InlineTableItem extends InlineItem {}\n\n//\n// Comment\n//\n// loc starts at \"#\" and goes to end of comment (trailing whitespace ignored)\n//\n// # comment here\n// ^------------^\n//\nexport interface Comment extends Node {\n type: NodeType.Comment;\n raw: string;\n}\nexport function isComment(node: Node): node is Comment {\n return node.type === NodeType.Comment;\n}\n\n//\n// Combinations\n//\n\nexport interface WithItems extends Node {\n items: Node[];\n}\nexport function hasItems(node: Node): node is WithItems {\n return (\n isDocument(node) ||\n isTable(node) ||\n isTableArray(node) ||\n isInlineTable(node) ||\n isInlineArray(node)\n );\n}\n\nexport interface WithItem extends Node {\n item: Node;\n}\nexport function hasItem(node: Node): node is WithItem {\n return isTableKey(node) || isTableArrayKey(node) || isInlineItem(node);\n}\n\nexport type Block = KeyValue | Table | TableArray | Comment;\nexport function isBlock(node: Node): node is Block {\n return isKeyValue(node) || isTable(node) || isTableArray(node) || isComment(node);\n}\n\nexport type Value =\n | String\n | Integer\n | Float\n | Boolean\n | DateTime\n | InlineArray\n | InlineTable;\nexport function isValue(node: Node): node is Value {\n return (\n isString(node) ||\n isInteger(node) ||\n isFloat(node) ||\n isBoolean(node) ||\n isDateTime(node) ||\n isInlineArray(node) ||\n isInlineTable(node)\n );\n}\n\nexport interface Node {\n type: NodeType;\n loc: Location;\n}\n","import Cursor, { iterator } from './cursor';\nimport { Location, Locator, createLocate, findPosition } from './location';\nimport ParseError from './parse-error';\n\nexport enum TokenType {\n Bracket = 'Bracket',\n Curly = 'Curly',\n Equal = 'Equal',\n Comma = 'Comma',\n Dot = 'Dot',\n Comment = 'Comment',\n Literal = 'Literal'\n}\n\nexport interface Token {\n type: TokenType;\n raw: string;\n loc: Location;\n}\n\nexport const IS_WHITESPACE = /\\s/;\nexport const IS_NEW_LINE = /(\\r\\n|\\n)/;\nexport const DOUBLE_QUOTE = `\"`;\nexport const SINGLE_QUOTE = `'`;\nexport const SPACE = ' ';\nexport const ESCAPE = '\\\\';\n\nconst IS_VALID_LEADING_CHARACTER = /[\\w,\\d,\\\",\\',\\+,\\-,\\_]/;\n\nexport function* tokenize(input: string): IterableIterator {\n const cursor = new Cursor(iterator(input));\n cursor.next();\n\n const locate = createLocate(input);\n\n while (!cursor.done) {\n if (IS_WHITESPACE.test(cursor.value!)) {\n // (skip whitespace)\n } else if (cursor.value === '[' || cursor.value === ']') {\n // Handle special characters: [, ], {, }, =, comma\n yield specialCharacter(cursor, locate, TokenType.Bracket);\n } else if (cursor.value === '{' || cursor.value === '}') {\n yield specialCharacter(cursor, locate, TokenType.Curly);\n } else if (cursor.value === '=') {\n yield 
specialCharacter(cursor, locate, TokenType.Equal);\n } else if (cursor.value === ',') {\n yield specialCharacter(cursor, locate, TokenType.Comma);\n } else if (cursor.value === '.') {\n yield specialCharacter(cursor, locate, TokenType.Dot);\n } else if (cursor.value === '#') {\n // Handle comments = # -> EOL\n yield comment(cursor, locate);\n } else {\n const multiline_char =\n checkThree(input, cursor.index, SINGLE_QUOTE) ||\n checkThree(input, cursor.index, DOUBLE_QUOTE);\n\n if (multiline_char) {\n // Multi-line literals or strings = no escaping\n yield multiline(cursor, locate, multiline_char, input);\n } else {\n yield string(cursor, locate, input);\n }\n }\n\n cursor.next();\n }\n}\n\nfunction specialCharacter(cursor: Cursor, locate: Locator, type: TokenType): Token {\n return { type, raw: cursor.value!, loc: locate(cursor.index, cursor.index + 1) };\n}\n\nfunction comment(cursor: Cursor, locate: Locator): Token {\n const start = cursor.index;\n let raw = cursor.value!;\n while (!cursor.peek().done && !IS_NEW_LINE.test(cursor.peek().value!)) {\n cursor.next();\n raw += cursor.value!;\n }\n\n // Early exit is ok for comment, no closing conditions\n\n return {\n type: TokenType.Comment,\n raw,\n loc: locate(start, cursor.index + 1)\n };\n}\n\nfunction multiline(\n cursor: Cursor,\n locate: Locator,\n multiline_char: string,\n input: string\n): Token {\n const start = cursor.index;\n let quotes = multiline_char + multiline_char + multiline_char;\n let raw = quotes;\n\n // Skip over quotes\n cursor.next();\n cursor.next();\n cursor.next();\n\n while (!cursor.done && !checkThree(input, cursor.index, multiline_char)) {\n raw += cursor.value;\n cursor.next();\n }\n\n if (cursor.done) {\n throw new ParseError(\n input,\n findPosition(input, cursor.index),\n `Expected close of multiline string with ${quotes}, reached end of file`\n );\n }\n\n raw += quotes;\n\n cursor.next();\n cursor.next();\n\n return {\n type: TokenType.Literal,\n raw,\n loc: locate(start, cursor.index + 1)\n };\n}\n\nfunction string(cursor: Cursor, locate: Locator, input: string): Token {\n // Remaining possibilities: keys, strings, literals, integer, float, boolean\n //\n // Special cases:\n // \"...\" -> quoted\n // '...' -> quoted\n // \"...\".'...' -> bare\n // 0000-00-00 00:00:00 -> bare\n //\n // See https://github.com/toml-lang/toml#offset-date-time\n //\n // | For the sake of readability, you may replace the T delimiter between date and time with a space (as permitted by RFC 3339 section 5.6).\n // | `odt4 = 1979-05-27 07:32:00Z`\n //\n // From RFC 3339:\n //\n // | NOTE: ISO 8601 defines date and time separated by \"T\".\n // | Applications using this syntax may choose, for the sake of\n // | readability, to specify a full-date and full-time separated by\n // | (say) a space character.\n\n // First, check for invalid characters\n if (!IS_VALID_LEADING_CHARACTER.test(cursor.value!)) {\n throw new ParseError(\n input,\n findPosition(input, cursor.index),\n `Unsupported character \"${cursor.value}\". Expected ALPHANUMERIC, \", ', +, -, or _`\n );\n }\n\n const start = cursor.index;\n let raw = cursor.value!;\n let double_quoted = cursor.value === DOUBLE_QUOTE;\n let single_quoted = cursor.value === SINGLE_QUOTE;\n\n const isFinished = (cursor: Cursor) => {\n if (cursor.peek().done) return true;\n const next_item = cursor.peek().value!;\n\n return (\n !(double_quoted || single_quoted) &&\n (IS_WHITESPACE.test(next_item) ||\n next_item === ',' ||\n next_item === '.' 
||\n next_item === ']' ||\n next_item === '}' ||\n next_item === '=')\n );\n };\n\n while (!cursor.done && !isFinished(cursor)) {\n cursor.next();\n\n if (cursor.value === DOUBLE_QUOTE) double_quoted = !double_quoted;\n if (cursor.value === SINGLE_QUOTE && !double_quoted) single_quoted = !single_quoted;\n\n raw += cursor.value!;\n\n if (cursor.peek().done) break;\n let next_item = cursor.peek().value!;\n\n // If next character is escape and currently double-quoted,\n // check for escaped quote\n if (double_quoted && cursor.value === ESCAPE) {\n if (next_item === DOUBLE_QUOTE) {\n raw += DOUBLE_QUOTE;\n cursor.next();\n } else if (next_item === ESCAPE) {\n raw += ESCAPE;\n cursor.next();\n }\n }\n }\n\n if (double_quoted || single_quoted) {\n throw new ParseError(\n input,\n findPosition(input, start),\n `Expected close of string with ${double_quoted ? DOUBLE_QUOTE : SINGLE_QUOTE}`\n );\n }\n\n return {\n type: TokenType.Literal,\n raw,\n loc: locate(start, cursor.index + 1)\n };\n}\n\nfunction checkThree(input: string, current: number, check: string): false | string {\n return (\n input[current] === check &&\n input[current + 1] === check &&\n input[current + 2] === check &&\n check\n );\n}\n","export function iterator(value: Iterable): Iterator {\n return value[Symbol.iterator]();\n}\n\nexport default class Cursor implements Iterator {\n iterator: Iterator;\n index: number;\n value?: T;\n done: boolean;\n peeked: IteratorResult | null;\n\n constructor(iterator: Iterator) {\n this.iterator = iterator;\n this.index = -1;\n this.value = undefined;\n this.done = false;\n this.peeked = null;\n }\n\n next(): IteratorResult {\n if (this.done) return done();\n\n const result = this.peeked || this.iterator.next();\n\n this.index += 1;\n this.value = result.value;\n this.done = result.done;\n this.peeked = null;\n\n return result;\n }\n\n peek(): IteratorResult {\n if (this.done) return done();\n if (this.peeked) return this.peeked;\n\n this.peeked = this.iterator.next();\n return this.peeked;\n }\n\n [Symbol.iterator]() {\n return this;\n }\n}\n\nfunction done(): IteratorResult {\n return { value: undefined, done: true };\n}\n","export interface Location {\n start: Position;\n end: Position;\n}\n\nexport interface Position {\n // Note: line is 1-indexed while column is 0-indexed\n line: number;\n column: number;\n}\n\nexport interface Span {\n lines: number;\n columns: number;\n}\n\nexport function getSpan(location: Location): Span {\n return {\n lines: location.end.line - location.start.line + 1,\n columns: location.end.column - location.start.column\n };\n}\n\nexport type Locator = (start: number, end: number) => Location;\nexport function createLocate(input: string): Locator {\n const lines = findLines(input);\n\n return (start: number, end: number) => {\n return {\n start: findPosition(lines, start),\n end: findPosition(lines, end)\n };\n };\n}\n\nexport function findPosition(input: string | number[], index: number): Position {\n // abc\\ndef\\ng\n // 0123 4567 8\n // 012\n // 0\n //\n // lines = [3, 7, 9]\n //\n // c = 2: 0 -> 1, 2 - (undefined + 1 || 0) = 2\n // 3: 0 -> 1, 3 - (undefined + 1 || 0) = 3\n // e = 5: 1 -> 2, 5 - (3 + 1 || 0) = 1\n // g = 8: 2 -> 3, 8 - (7 + 1 || 0) = 0\n\n const lines = Array.isArray(input) ? 
input : findLines(input);\n const line = lines.findIndex(line_index => line_index >= index) + 1;\n const column = index - (lines[line - 2] + 1 || 0);\n\n return { line, column };\n}\n\nexport function getLine(input: string, position: Position): string {\n const lines = findLines(input);\n const start = lines[position.line - 2] || 0;\n const end = lines[position.line - 1] || input.length;\n\n return input.substr(start, end - start);\n}\n\nexport function findLines(input: string): number[] {\n // exec is stateful, so create new regexp each time\n const BY_NEW_LINE = /[\\r\\n|\\n]/g;\n const indexes: number[] = [];\n\n let match;\n while ((match = BY_NEW_LINE.exec(input)) != null) {\n indexes.push(match.index);\n }\n indexes.push(input.length + 1);\n\n return indexes;\n}\n\nexport function clonePosition(position: Position): Position {\n return { line: position.line, column: position.column };\n}\n\nexport function cloneLocation(location: Location): Location {\n return { start: clonePosition(location.start), end: clonePosition(location.end) };\n}\n\nexport function zero(): Position {\n return { line: 1, column: 0 };\n}\n","import { Position, getLine } from './location';\n\nexport default class ParseError extends Error {\n line: number;\n column: number;\n\n constructor(input: string, position: Position, message: string) {\n let error_message = `Error parsing TOML (${position.line}, ${position.column + 1}):\\n`;\n\n if (input) {\n const line = getLine(input, position);\n const pointer = `${whitespace(position.column)}^`;\n\n if (line) error_message += `${line}\\n${pointer}\\n`;\n }\n error_message += message;\n\n super(error_message);\n\n this.line = position.line;\n this.column = position.column;\n }\n}\n\nexport function isParseError(error: Error): error is ParseError {\n return error && Object.prototype.hasOwnProperty.call(error, 'line');\n}\n\nfunction whitespace(count: number, character: string = ' '): string {\n return character.repeat(count);\n}\n","export function last(values: TValue[]): TValue | undefined {\n return values[values.length - 1];\n}\n\nexport type BlankObject = { [key: string]: any };\n\nexport function blank(): BlankObject {\n return Object.create(null);\n}\n\nexport function isString(value: any): value is string {\n return typeof value === 'string';\n}\n\nexport function isInteger(value: any): value is number {\n return typeof value === 'number' && value % 1 === 0;\n}\n\nexport function isFloat(value: any): value is number {\n return typeof value === 'number' && !isInteger(value);\n}\n\nexport function isBoolean(value: any): value is boolean {\n return typeof value === 'boolean';\n}\n\nexport function isDate(value: any): value is Date {\n return Object.prototype.toString.call(value) === '[object Date]';\n}\n\nexport function isObject(value: any): boolean {\n return value && typeof value === 'object' && !isDate(value) && !Array.isArray(value);\n}\n\nexport function isIterable(value: any): value is Iterable {\n return value != null && typeof value[Symbol.iterator] === 'function';\n}\n\nexport function has(object: any, key: string): boolean {\n return Object.prototype.hasOwnProperty.call(object, key);\n}\n\nexport function arraysEqual(a: TItem[], b: TItem[]): boolean {\n if (a.length !== b.length) return false;\n\n for (let i = 0; i < a.length; i++) {\n if (a[i] !== b[i]) return false;\n }\n\n return true;\n}\n\nexport function datesEqual(a: any, b: any): boolean {\n return isDate(a) && isDate(b) && a.toISOString() === b.toISOString();\n}\n\nexport function pipe(value: 
TValue, ...fns: Array<(value: TValue) => TValue>): TValue {\n return fns.reduce((value, fn) => fn(value), value);\n}\n\nexport function stableStringify(object: any): string {\n if (isObject(object)) {\n const key_values = Object.keys(object)\n .sort()\n .map(key => `${JSON.stringify(key)}:${stableStringify(object[key])}`);\n\n return `{${key_values.join(',')}}`;\n } else if (Array.isArray(object)) {\n return `[${object.map(stableStringify).join(',')}]`;\n } else {\n return JSON.stringify(object);\n }\n}\n\nexport function merge(target: TValue[], values: TValue[]) {\n // __mutating__: merge values into target\n // Reference: https://dev.to/uilicious/javascript-array-push-is-945x-faster-than-array-concat-1oki\n const original_length = target.length;\n const added_length = values.length;\n target.length = original_length + added_length;\n\n for (let i = 0; i < added_length; i++) {\n target[original_length + i] = values[i];\n }\n}\n","import { SINGLE_QUOTE, DOUBLE_QUOTE } from './tokenizer';\nimport { pipe } from './utils';\n\nconst TRIPLE_DOUBLE_QUOTE = `\"\"\"`;\nconst TRIPLE_SINGLE_QUOTE = `'''`;\nconst LF = '\\\\n';\nconst CRLF = '\\\\r\\\\n';\nconst IS_CRLF = /\\r\\n/g;\nconst IS_LF = /\\n/g;\nconst IS_LEADING_NEW_LINE = /^(\\r\\n|\\n)/;\nconst IS_LINE_ENDING_BACKSLASH = /\\\\\\s*[\\n\\r\\n]\\s*/g;\n\nexport function parseString(raw: string): string {\n if (raw.startsWith(TRIPLE_SINGLE_QUOTE)) {\n return pipe(\n trim(raw, 3),\n trimLeadingWhitespace\n );\n } else if (raw.startsWith(SINGLE_QUOTE)) {\n return trim(raw, 1);\n } else if (raw.startsWith(TRIPLE_DOUBLE_QUOTE)) {\n return pipe(\n trim(raw, 3),\n trimLeadingWhitespace,\n lineEndingBackslash,\n escapeNewLines,\n unescape\n );\n } else if (raw.startsWith(DOUBLE_QUOTE)) {\n return pipe(\n trim(raw, 1),\n unescape\n );\n } else {\n return raw;\n }\n}\n\nexport function unescape(escaped: string): string {\n // JSON.parse handles everything except \\UXXXXXXXX\n // replace those instances with code point, escape that, and then parse\n const LARGE_UNICODE = /\\\\U[a-fA-F0-9]{8}/g;\n const json_escaped = escaped.replace(LARGE_UNICODE, value => {\n const code_point = parseInt(value.replace('\\\\U', ''), 16);\n const as_string = String.fromCodePoint(code_point);\n\n return trim(JSON.stringify(as_string), 1);\n });\n\n return JSON.parse(`\"${json_escaped}\"`);\n}\n\nexport function escape(value: string): string {\n return trim(JSON.stringify(value), 1);\n}\n\nfunction trim(value: string, count: number): string {\n return value.substr(count, value.length - count * 2);\n}\n\nfunction trimLeadingWhitespace(value: string): string {\n return IS_LEADING_NEW_LINE.test(value) ? 
value.substr(1) : value;\n}\n\nfunction escapeNewLines(value: string): string {\n return value.replace(IS_CRLF, CRLF).replace(IS_LF, LF);\n}\n\nfunction lineEndingBackslash(value: string): string {\n return value.replace(IS_LINE_ENDING_BACKSLASH, '');\n}\n","import {\n NodeType,\n KeyValue,\n Table,\n TableKey,\n TableArray,\n TableArrayKey,\n Key,\n Value,\n String,\n Integer,\n Float,\n Boolean,\n DateTime,\n InlineTable,\n InlineArray,\n InlineItem,\n Comment,\n AST,\n Block\n} from './ast';\nimport { Token, TokenType, tokenize, DOUBLE_QUOTE, SINGLE_QUOTE } from './tokenizer';\nimport { parseString } from './parse-string';\nimport Cursor from './cursor';\nimport { clonePosition, cloneLocation } from './location';\nimport ParseError from './parse-error';\nimport { merge } from './utils';\n\nconst TRUE = 'true';\nconst FALSE = 'false';\nconst HAS_E = /e/i;\nconst IS_DIVIDER = /\\_/g;\nconst IS_INF = /inf/;\nconst IS_NAN = /nan/;\nconst IS_HEX = /^0x/;\nconst IS_OCTAL = /^0o/;\nconst IS_BINARY = /^0b/;\nexport const IS_FULL_DATE = /(\\d{4})-(\\d{2})-(\\d{2})/;\nexport const IS_FULL_TIME = /(\\d{2}):(\\d{2}):(\\d{2})/;\n\nexport default function* parseTOML(input: string): AST {\n const tokens = tokenize(input);\n const cursor = new Cursor(tokens);\n\n while (!cursor.next().done) {\n yield* walkBlock(cursor, input);\n }\n}\n\nfunction* walkBlock(cursor: Cursor, input: string): IterableIterator {\n if (cursor.value!.type === TokenType.Comment) {\n yield comment(cursor);\n } else if (cursor.value!.type === TokenType.Bracket) {\n yield table(cursor, input);\n } else if (cursor.value!.type === TokenType.Literal) {\n yield* keyValue(cursor, input);\n } else {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Unexpected token \"${cursor.value!.type}\". Expected Comment, Bracket, or String`\n );\n }\n}\n\nfunction* walkValue(cursor: Cursor, input: string): IterableIterator {\n if (cursor.value!.type === TokenType.Literal) {\n if (cursor.value!.raw[0] === DOUBLE_QUOTE || cursor.value!.raw[0] === SINGLE_QUOTE) {\n yield string(cursor);\n } else if (cursor.value!.raw === TRUE || cursor.value!.raw === FALSE) {\n yield boolean(cursor);\n } else if (IS_FULL_DATE.test(cursor.value!.raw) || IS_FULL_TIME.test(cursor.value!.raw)) {\n yield datetime(cursor, input);\n } else if (\n (!cursor.peek().done && cursor.peek().value!.type === TokenType.Dot) ||\n IS_INF.test(cursor.value!.raw) ||\n IS_NAN.test(cursor.value!.raw) ||\n (HAS_E.test(cursor.value!.raw) && !IS_HEX.test(cursor.value!.raw))\n ) {\n yield float(cursor, input);\n } else {\n yield integer(cursor);\n }\n } else if (cursor.value!.type === TokenType.Curly) {\n yield inlineTable(cursor, input);\n } else if (cursor.value!.type === TokenType.Bracket) {\n const [inline_array, comments] = inlineArray(cursor, input);\n\n yield inline_array;\n yield* comments;\n } else {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Unrecognized token type \"${cursor.value!.type}\". 
Expected String, Curly, or Bracket`\n );\n }\n}\n\nfunction comment(cursor: Cursor): Comment {\n // # line comment\n // ^------------^ Comment\n return {\n type: NodeType.Comment,\n loc: cursor.value!.loc,\n raw: cursor.value!.raw\n };\n}\n\nfunction table(cursor: Cursor, input: string): Table | TableArray {\n // Table or TableArray\n //\n // [ key ]\n // ^-----^ TableKey\n // ^-^ Key\n //\n // [[ key ]]\n // ^ ------^ TableArrayKey\n // ^-^ Key\n //\n // a = \"b\" < Items\n // # c |\n // d = \"f\" <\n //\n // ...\n const type =\n !cursor.peek().done && cursor.peek().value!.type === TokenType.Bracket\n ? NodeType.TableArray\n : NodeType.Table;\n const is_table = type === NodeType.Table;\n\n if (is_table && cursor.value!.raw !== '[') {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Expected table opening \"[\", found ${cursor.value!.raw}`\n );\n }\n if (!is_table && (cursor.value!.raw !== '[' || cursor.peek().value!.raw !== '[')) {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Expected array of tables opening \"[[\", found ${cursor.value!.raw + cursor.peek().value!.raw}`\n );\n }\n\n // Set start location from opening tag\n const key = is_table\n ? ({\n type: NodeType.TableKey,\n loc: cursor.value!.loc\n } as Partial)\n : ({\n type: NodeType.TableArrayKey,\n loc: cursor.value!.loc\n } as Partial);\n\n // Skip to cursor.value for key value\n cursor.next();\n if (type === NodeType.TableArray) cursor.next();\n\n if (cursor.done) {\n throw new ParseError(input, key.loc!.start, `Expected table key, reached end of file`);\n }\n\n key.item = {\n type: NodeType.Key,\n loc: cloneLocation(cursor.value!.loc),\n raw: cursor.value!.raw,\n value: [parseString(cursor.value!.raw)]\n };\n\n while (!cursor.peek().done && cursor.peek().value!.type === TokenType.Dot) {\n cursor.next();\n const dot = cursor.value!;\n\n cursor.next();\n const before = ' '.repeat(dot.loc.start.column - key.item.loc.end.column);\n const after = ' '.repeat(cursor.value!.loc.start.column - dot.loc.end.column);\n\n key.item.loc.end = cursor.value!.loc.end;\n key.item.raw += `${before}.${after}${cursor.value!.raw}`;\n key.item.value.push(parseString(cursor.value!.raw));\n }\n\n cursor.next();\n\n if (is_table && (cursor.done || cursor.value!.raw !== ']')) {\n throw new ParseError(\n input,\n cursor.done ? key.item.loc.end : cursor.value!.loc.start,\n `Expected table closing \"]\", found ${cursor.done ? 'end of file' : cursor.value!.raw}`\n );\n }\n if (\n !is_table &&\n (cursor.done ||\n cursor.peek().done ||\n cursor.value!.raw !== ']' ||\n cursor.peek().value!.raw !== ']')\n ) {\n throw new ParseError(\n input,\n cursor.done || cursor.peek().done ? key.item.loc.end : cursor.value!.loc.start,\n `Expected array of tables closing \"]]\", found ${\n cursor.done || cursor.peek().done\n ? 'end of file'\n : cursor.value!.raw + cursor.peek().value!.raw\n }`\n );\n }\n\n // Set end location from closing tag\n if (!is_table) cursor.next();\n key.loc!.end = cursor.value!.loc.end;\n\n // Add child items\n let items: Array = [];\n while (!cursor.peek().done && cursor.peek().value!.type !== TokenType.Bracket) {\n cursor.next();\n merge(items, [...walkBlock(cursor, input)] as Array);\n }\n\n return {\n type: is_table ? NodeType.Table : NodeType.TableArray,\n loc: {\n start: clonePosition(key.loc!.start),\n end: items.length\n ? 
clonePosition(items[items.length - 1].loc.end)\n : clonePosition(key.loc!.end)\n },\n key: key as TableKey | TableArrayKey,\n items\n } as Table | TableArray;\n}\n\nfunction keyValue(cursor: Cursor, input: string): Array {\n // 3. KeyValue\n //\n // key = value\n // ^-^ key\n // ^ equals\n // ^---^ value\n const key: Key = {\n type: NodeType.Key,\n loc: cloneLocation(cursor.value!.loc),\n raw: cursor.value!.raw,\n value: [parseString(cursor.value!.raw)]\n };\n\n while (!cursor.peek().done && cursor.peek().value!.type === TokenType.Dot) {\n cursor.next();\n cursor.next();\n\n key.loc.end = cursor.value!.loc.end;\n key.raw += `.${cursor.value!.raw}`;\n key.value.push(parseString(cursor.value!.raw));\n }\n\n cursor.next();\n\n if (cursor.done || cursor.value!.type !== TokenType.Equal) {\n throw new ParseError(\n input,\n cursor.done ? key.loc.end : cursor.value!.loc.start,\n `Expected \"=\" for key-value, found ${cursor.done ? 'end of file' : cursor.value!.raw}`\n );\n }\n\n const equals = cursor.value!.loc.start.column;\n\n cursor.next();\n\n if (cursor.done) {\n throw new ParseError(input, key.loc.start, `Expected value for key-value, reached end of file`);\n }\n\n const [value, ...comments] = walkValue(cursor, input) as Iterable;\n\n return [\n {\n type: NodeType.KeyValue,\n key,\n value: value as Value,\n loc: {\n start: clonePosition(key.loc.start),\n end: clonePosition(value.loc.end)\n },\n equals\n },\n ...(comments as Comment[])\n ];\n}\n\nfunction string(cursor: Cursor): String {\n return {\n type: NodeType.String,\n loc: cursor.value!.loc,\n raw: cursor.value!.raw,\n value: parseString(cursor.value!.raw)\n };\n}\n\nfunction boolean(cursor: Cursor): Boolean {\n return {\n type: NodeType.Boolean,\n loc: cursor.value!.loc,\n value: cursor.value!.raw === TRUE\n };\n}\n\nfunction datetime(cursor: Cursor, input: string): DateTime {\n // Possible values:\n //\n // Offset Date-Time\n // | odt1 = 1979-05-27T07:32:00Z\n // | odt2 = 1979-05-27T00:32:00-07:00\n // | odt3 = 1979-05-27T00:32:00.999999-07:00\n // | odt4 = 1979-05-27 07:32:00Z\n //\n // Local Date-Time\n // | ldt1 = 1979-05-27T07:32:00\n // | ldt2 = 1979-05-27T00:32:00.999999\n //\n // Local Date\n // | ld1 = 1979-05-27\n //\n // Local Time\n // | lt1 = 07:32:00\n // | lt2 = 00:32:00.999999\n let loc = cursor.value!.loc;\n let raw = cursor.value!.raw;\n let value: Date;\n\n // If next token is string,\n // check if raw is full date and following is full time\n if (\n !cursor.peek().done &&\n cursor.peek().value!.type === TokenType.Literal &&\n IS_FULL_DATE.test(raw) &&\n IS_FULL_TIME.test(cursor.peek().value!.raw)\n ) {\n const start = loc.start;\n\n cursor.next();\n loc = { start, end: cursor.value!.loc.end };\n raw += ` ${cursor.value!.raw}`;\n }\n\n if (!cursor.peek().done && cursor.peek().value!.type === TokenType.Dot) {\n const start = loc.start;\n\n cursor.next();\n\n if (cursor.peek().done || cursor.peek().value!.type !== TokenType.Literal) {\n throw new ParseError(input, cursor.value!.loc.end, `Expected fractional value for DateTime`);\n }\n cursor.next();\n\n loc = { start, end: cursor.value!.loc.end };\n raw += `.${cursor.value!.raw}`;\n }\n\n if (!IS_FULL_DATE.test(raw)) {\n // For local time, use local ISO date\n const [local_date] = new Date().toISOString().split('T');\n value = new Date(`${local_date}T${raw}`);\n } else {\n value = new Date(raw.replace(' ', 'T'));\n }\n\n return {\n type: NodeType.DateTime,\n loc,\n raw,\n value\n };\n}\n\nfunction float(cursor: Cursor, input: string): Float {\n let loc = 
cursor.value!.loc;\n let raw = cursor.value!.raw;\n let value;\n\n if (IS_INF.test(raw)) {\n value = raw === '-inf' ? -Infinity : Infinity;\n } else if (IS_NAN.test(raw)) {\n value = raw === '-nan' ? -NaN : NaN;\n } else if (!cursor.peek().done && cursor.peek().value!.type === TokenType.Dot) {\n const start = loc.start;\n\n // From spec:\n // | A fractional part is a decimal point followed by one or more digits.\n //\n // -> Don't have to handle \"4.\" (i.e. nothing behind decimal place)\n\n cursor.next();\n\n if (cursor.peek().done || cursor.peek().value!.type !== TokenType.Literal) {\n throw new ParseError(input, cursor.value!.loc.end, `Expected fraction value for Float`);\n }\n cursor.next();\n\n raw += `.${cursor.value!.raw}`;\n loc = { start, end: cursor.value!.loc.end };\n value = Number(raw.replace(IS_DIVIDER, ''));\n } else {\n value = Number(raw.replace(IS_DIVIDER, ''));\n }\n\n return { type: NodeType.Float, loc, raw, value };\n}\n\nfunction integer(cursor: Cursor): Integer {\n // > Integer values -0 and +0 are valid and identical to an unprefixed zero\n if (cursor.value!.raw === '-0' || cursor.value!.raw === '+0') {\n return {\n type: NodeType.Integer,\n loc: cursor.value!.loc,\n raw: cursor.value!.raw,\n value: 0\n };\n }\n\n let radix = 10;\n if (IS_HEX.test(cursor.value!.raw)) {\n radix = 16;\n } else if (IS_OCTAL.test(cursor.value!.raw)) {\n radix = 8;\n } else if (IS_BINARY.test(cursor.value!.raw)) {\n radix = 2;\n }\n\n const value = parseInt(\n cursor\n .value!.raw.replace(IS_DIVIDER, '')\n .replace(IS_OCTAL, '')\n .replace(IS_BINARY, ''),\n radix\n );\n\n return {\n type: NodeType.Integer,\n loc: cursor.value!.loc,\n raw: cursor.value!.raw,\n value\n };\n}\n\nfunction inlineTable(cursor: Cursor, input: string): InlineTable {\n if (cursor.value!.raw !== '{') {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Expected \"{\" for inline table, found ${cursor.value!.raw}`\n );\n }\n\n // 6. InlineTable\n const value: InlineTable = {\n type: NodeType.InlineTable,\n loc: cloneLocation(cursor.value!.loc),\n items: []\n };\n\n cursor.next();\n\n while (\n !cursor.done &&\n !(cursor.value!.type === TokenType.Curly && (cursor.value as Token).raw === '}')\n ) {\n if ((cursor.value as Token).type === TokenType.Comma) {\n const previous = value.items[value.items.length - 1];\n if (!previous) {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n 'Found \",\" without previous value in inline table'\n );\n }\n\n previous.comma = true;\n previous.loc.end = cursor.value!.loc.start;\n\n cursor.next();\n continue;\n }\n\n const [item] = walkBlock(cursor, input);\n if (item.type !== NodeType.KeyValue) {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Only key-values are supported in inline tables, found ${item.type}`\n );\n }\n\n const inline_item: InlineItem = {\n type: NodeType.InlineItem,\n loc: cloneLocation(item.loc),\n item,\n comma: false\n };\n\n value.items.push(inline_item);\n cursor.next();\n }\n\n if (\n cursor.done ||\n cursor.value!.type !== TokenType.Curly ||\n (cursor.value as Token).raw !== '}'\n ) {\n throw new ParseError(\n input,\n cursor.done ? value.loc.start : cursor.value!.loc.start,\n `Expected \"}\", found ${cursor.done ? 'end of file' : cursor.value!.raw}`\n );\n }\n\n value.loc.end = cursor.value!.loc.end;\n\n return value;\n}\n\nfunction inlineArray(cursor: Cursor, input: string): [InlineArray, Comment[]] {\n // 7. 
InlineArray\n if (cursor.value!.raw !== '[') {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n `Expected \"[\" for inline array, found ${cursor.value!.raw}`\n );\n }\n\n const value: InlineArray = {\n type: NodeType.InlineArray,\n loc: cloneLocation(cursor.value!.loc),\n items: []\n };\n let comments: Comment[] = [];\n\n cursor.next();\n\n while (\n !cursor.done &&\n !(cursor.value!.type === TokenType.Bracket && (cursor.value as Token).raw === ']')\n ) {\n if ((cursor.value as Token).type === TokenType.Comma) {\n const previous = value.items[value.items.length - 1];\n if (!previous) {\n throw new ParseError(\n input,\n cursor.value!.loc.start,\n 'Found \",\" without previous value for inline array'\n );\n }\n\n previous.comma = true;\n previous.loc.end = cursor.value!.loc.start;\n } else if ((cursor.value as Token).type === TokenType.Comment) {\n comments.push(comment(cursor));\n } else {\n const [item, ...additional_comments] = walkValue(cursor, input);\n const inline_item: InlineItem = {\n type: NodeType.InlineItem,\n loc: cloneLocation(item.loc),\n item,\n comma: false\n };\n\n value.items.push(inline_item);\n merge(comments, additional_comments as Comment[]);\n }\n\n cursor.next();\n }\n\n if (\n cursor.done ||\n cursor.value!.type !== TokenType.Bracket ||\n (cursor.value as Token).raw !== ']'\n ) {\n throw new ParseError(\n input,\n cursor.done ? value.loc.start : cursor.value!.loc.start,\n `Expected \"]\", found ${cursor.done ? 'end of file' : cursor.value!.raw}`\n );\n }\n\n value.loc.end = cursor.value!.loc.end;\n\n return [value, comments];\n}\n","import {\n NodeType,\n AST,\n Node,\n Document,\n Table,\n TableKey,\n TableArray,\n TableArrayKey,\n KeyValue,\n Key,\n String,\n Integer,\n Float,\n Boolean,\n DateTime,\n Comment,\n InlineArray,\n InlineTable,\n InlineItem\n} from './ast';\nimport { isIterable } from './utils';\n\nexport type Visit = (node: TNode, parent: TNode | null) => void;\nexport type EnterExit = { enter?: Visit; exit?: Visit };\n\nexport type Visitor = {\n Document?: Visit | EnterExit;\n Table?: Visit
| EnterExit
;\n TableKey?: Visit | EnterExit;\n TableArray?: Visit | EnterExit;\n TableArrayKey?: Visit | EnterExit;\n KeyValue?: Visit | EnterExit;\n Key?: Visit | EnterExit;\n String?: Visit | EnterExit;\n Integer?: Visit | EnterExit;\n Float?: Visit | EnterExit;\n Boolean?: Visit | EnterExit;\n DateTime?: Visit | EnterExit;\n InlineArray?: Visit | EnterExit;\n InlineItem?: Visit | EnterExit;\n InlineTable?: Visit | EnterExit;\n Comment?: Visit | EnterExit;\n};\n\nexport default function traverse(ast: AST | Node, visitor: Visitor) {\n if (isIterable(ast)) {\n traverseArray(ast, null);\n } else {\n traverseNode(ast, null);\n }\n\n function traverseArray(array: Iterable, parent: Node | null) {\n for (const node of array) {\n traverseNode(node, parent);\n }\n }\n\n function traverseNode(node: Node, parent: Node | null) {\n const visit = visitor[node.type];\n\n if (visit && typeof visit === 'function') {\n (visit as Visit)(node, parent);\n }\n if (visit && (visit as EnterExit).enter) {\n (visit as EnterExit).enter!(node, parent);\n }\n\n switch (node.type) {\n case NodeType.Document:\n traverseArray((node as Document).items, node);\n break;\n\n case NodeType.Table:\n traverseNode((node as Table).key, node);\n traverseArray((node as Table).items, node);\n break;\n case NodeType.TableKey:\n traverseNode((node as TableKey).item, node);\n break;\n\n case NodeType.TableArray:\n traverseNode((node as TableArray).key, node);\n traverseArray((node as TableArray).items, node);\n break;\n case NodeType.TableArrayKey:\n traverseNode((node as TableArrayKey).item, node);\n break;\n\n case NodeType.KeyValue:\n traverseNode((node as KeyValue).key, node);\n traverseNode((node as KeyValue).value, node);\n break;\n\n case NodeType.InlineArray:\n traverseArray((node as InlineArray).items, node);\n break;\n case NodeType.InlineItem:\n traverseNode((node as InlineItem).item, node);\n break;\n\n case NodeType.InlineTable:\n traverseArray((node as InlineTable).items, node);\n break;\n\n case NodeType.Key:\n case NodeType.String:\n case NodeType.Integer:\n case NodeType.Float:\n case NodeType.Boolean:\n case NodeType.DateTime:\n case NodeType.Comment:\n break;\n\n default:\n throw new Error(`Unrecognized node type \"${node.type}\"`);\n }\n\n if (visit && (visit as EnterExit).exit) {\n (visit as EnterExit).exit!(node, parent);\n }\n }\n}\n","import {\n NodeType,\n Node,\n Document,\n Key,\n Value,\n InlineArray,\n InlineArrayItem,\n InlineTableItem,\n isKeyValue,\n isTable,\n isTableArray,\n isInlineTable,\n isInlineArray,\n hasItems,\n hasItem,\n isComment,\n isDocument,\n InlineTable,\n TableArray,\n Table,\n KeyValue,\n Comment,\n InlineItem,\n isInlineItem,\n Block,\n isBlock\n} from './ast';\nimport { Span, getSpan, clonePosition } from './location';\nimport { last } from './utils';\nimport traverse from './traverse';\n\nexport type Root = Document | Node;\n\n// Store line and column offsets per node\n//\n// Some offsets are applied on enter (e.g. shift child items and next items)\n// Others are applied on exit (e.g. 
shift next items)\ntype Offsets = WeakMap;\n\nconst enter_offsets: WeakMap = new WeakMap();\nconst getEnter = (root: Root) => {\n if (!enter_offsets.has(root)) {\n enter_offsets.set(root, new WeakMap());\n }\n return enter_offsets.get(root)!;\n};\n\nconst exit_offsets: WeakMap = new WeakMap();\nconst getExit = (root: Root) => {\n if (!exit_offsets.has(root)) {\n exit_offsets.set(root, new WeakMap());\n }\n return exit_offsets.get(root)!;\n};\n\nexport function replace(root: Root, parent: Node, existing: Node, replacement: Node) {\n // First, replace existing node\n // (by index for items, item, or key/value)\n if (hasItems(parent)) {\n const index = parent.items.indexOf(existing);\n if (index < 0) throw new Error(`Could not find existing item in parent node for replace`);\n\n parent.items.splice(index, 1, replacement);\n } else if (hasItem(parent)) {\n parent.item = replacement;\n } else if (isKeyValue(parent)) {\n if (parent.key === existing) {\n parent.key = replacement as Key;\n } else {\n parent.value = replacement as Value;\n }\n } else {\n throw new Error(`Unsupported parent type \"${parent.type}\" for replace`);\n }\n\n // Shift the replacement node into the same start position as existing\n const shift = {\n lines: existing.loc.start.line - replacement.loc.start.line,\n columns: existing.loc.start.column - replacement.loc.start.column\n };\n shiftNode(replacement, shift);\n\n // Apply offsets after replacement node\n const existing_span = getSpan(existing.loc);\n const replacement_span = getSpan(replacement.loc);\n const offset = {\n lines: replacement_span.lines - existing_span.lines,\n columns: replacement_span.columns - existing_span.columns\n };\n\n addOffset(offset, getExit(root), replacement, existing);\n}\n\nexport function insert(root: Root, parent: Node, child: Node, index?: number) {\n if (!hasItems(parent)) {\n throw new Error(`Unsupported parent type \"${(parent as Node).type}\" for insert`);\n }\n\n index = index != null ? 
index : parent.items.length;\n\n let shift: Span;\n let offset: Span;\n if (isInlineArray(parent) || isInlineTable(parent)) {\n ({ shift, offset } = insertInline(parent, child as InlineItem, index));\n } else {\n ({ shift, offset } = insertOnNewLine(\n parent as Document | Table | TableArray,\n child as KeyValue | Comment,\n index\n ));\n }\n\n shiftNode(child, shift);\n\n // The child element is placed relative to the previous element,\n // if the previous element has an offset, need to position relative to that\n // -> Move previous offset to child's offset\n const previous = parent.items[index - 1];\n const previous_offset = previous && getExit(root).get(previous);\n if (previous_offset) {\n offset.lines += previous_offset.lines;\n offset.columns += previous_offset.columns;\n\n // Account for comma overlay\n //\n // a = [b, e]\n // a = [b, c, e]\n // ^---^\n // a = [b, c, d, e]\n // ^---^\n if (isInlineItem(child) && previous && parent.items[index + 1]) {\n offset.columns -= 2;\n }\n\n getExit(root).delete(previous!);\n }\n\n const offsets = getExit(root);\n offsets.set(child, offset);\n}\n\nfunction insertOnNewLine(\n parent: Document | Table | TableArray,\n child: Block,\n index: number\n): { shift: Span; offset: Span } {\n if (!isBlock(child)) {\n throw new Error(`Incompatible child type \"${(child as Node).type}\"`);\n }\n\n const previous = parent.items[index - 1];\n const use_first_line = isDocument(parent) && !parent.items.length;\n\n parent.items.splice(index, 0, child);\n\n // Set start location from previous item or start of array\n // (previous is undefined for empty array or inserting at first item)\n const start = previous\n ? {\n line: previous.loc.end.line,\n column: !isComment(previous) ? previous.loc.start.column : parent.loc.start.column\n }\n : clonePosition(parent.loc.start);\n\n const is_block = isTable(child) || isTableArray(child);\n let leading_lines = 0;\n if (use_first_line) {\n // 0 leading lines\n } else if (is_block) {\n leading_lines = 2;\n } else {\n leading_lines = 1;\n }\n start.line += leading_lines;\n\n const shift = {\n lines: start.line - child.loc.start.line,\n columns: start.column - child.loc.start.column\n };\n\n // Apply offsets after child node\n const child_span = getSpan(child.loc);\n const offset = {\n lines: child_span.lines + (leading_lines - 1),\n columns: child_span.columns\n };\n\n return { shift, offset };\n}\n\nfunction insertInline(\n parent: InlineArray | InlineTable,\n child: InlineItem,\n index: number\n): { shift: Span; offset: Span } {\n if (!isInlineItem(child)) {\n throw new Error(`Incompatible child type \"${(child as Node).type}\"`);\n }\n\n // Store preceding node and insert\n const previous = index != null ? parent.items[index - 1] : last(parent.items);\n const is_last = index == null || index === parent.items.length;\n\n parent.items.splice(index, 0, child);\n\n // Add commas as-needed\n const leading_comma = !!previous;\n const trailing_comma = !is_last;\n const last_comma = is_last && child.comma === true;\n if (leading_comma) {\n previous!.comma = true;\n }\n if (trailing_comma) {\n child.comma = true;\n }\n\n // Use a new line for documents, children of Table/TableArray,\n // and if an inline table is using new lines\n const use_new_line = isInlineArray(parent) && perLine(parent);\n\n // Set start location from previous item or start of array\n // (previous is undefined for empty array or inserting at first item)\n const start = previous\n ? {\n line: previous.loc.end.line,\n column: use_new_line\n ? 
!isComment(previous)\n ? previous.loc.start.column\n : parent.loc.start.column\n : previous.loc.end.column\n }\n : clonePosition(parent.loc.start);\n\n let leading_lines = 0;\n if (use_new_line) {\n leading_lines = 1;\n } else {\n const skip_comma = 2;\n const skip_bracket = 1;\n start.column += leading_comma ? skip_comma : skip_bracket;\n }\n start.line += leading_lines;\n\n const shift = {\n lines: start.line - child.loc.start.line,\n columns: start.column - child.loc.start.column\n };\n\n // Apply offsets after child node\n const child_span = getSpan(child.loc);\n const offset = {\n lines: child_span.lines + (leading_lines - 1),\n columns: child_span.columns + (leading_comma || trailing_comma ? 2 : 0) + (last_comma ? 1 : 0)\n };\n\n return { shift, offset };\n}\n\nexport function remove(root: Root, parent: Node, node: Node) {\n // Remove an element from the parent's items\n // (supports Document, Table, TableArray, InlineTable, and InlineArray\n //\n // X\n // [ 1, 2, 3 ]\n // ^-^\n // -> Remove element 2 and apply 0,-3 offset to 1\n //\n // [table]\n // a = 1\n // b = 2 # X\n // c = 3\n // -> Remove element 2 and apply -1,0 offset to 1\n if (!hasItems(parent)) {\n throw new Error(`Unsupported parent type \"${parent.type}\" for remove`);\n }\n\n let index = parent.items.indexOf(node);\n if (index < 0) {\n // Try again, looking at child items for nodes like InlineArrayItem\n index = parent.items.findIndex(item => hasItem(item) && item.item === node);\n\n if (index < 0) {\n throw new Error('Could not find node in parent for removal');\n }\n\n node = parent.items[index];\n }\n\n const previous = parent.items[index - 1];\n let next = parent.items[index + 1];\n\n // Remove node\n parent.items.splice(index, 1);\n let removed_span = getSpan(node.loc);\n\n // Remove an associated comment that appears on the same line\n //\n // [table]\n // a = 1\n // b = 2 # remove this too\n // c = 3\n //\n // TODO InlineTable - this only applies to comments in Table/TableArray\n if (next && isComment(next) && next.loc.start.line === node.loc.end.line) {\n // Add comment to removed\n removed_span = getSpan({ start: node.loc.start, end: next.loc.end });\n\n // Shift to next item\n // (use same index since node has already been removed)\n next = parent.items[index + 1];\n\n // Remove comment\n parent.items.splice(index, 1);\n }\n\n // For inline tables and arrays, check whether the line should be kept\n const is_inline = previous && isInlineItem(previous);\n const previous_on_same_line = previous && previous.loc.end.line === node.loc.start.line;\n const next_on_sameLine = next && next.loc.start.line === node.loc.end.line;\n const keep_line = is_inline && (previous_on_same_line || next_on_sameLine);\n\n const offset = {\n lines: -(removed_span.lines - (keep_line ? 1 : 0)),\n columns: -removed_span.columns\n };\n\n // Offset for comma and remove comma from previous (if-needed)\n if (is_inline && previous_on_same_line) {\n offset.columns -= 2;\n }\n if (is_inline && previous && !next) {\n (previous as InlineArrayItem | InlineTableItem).comma = false;\n }\n\n // Apply offsets after preceding node or before children of parent node\n const target = previous || parent;\n const target_offsets = previous ? 
getExit(root) : getEnter(root);\n const node_offsets = getExit(root);\n const previous_offset = target_offsets.get(target);\n if (previous_offset) {\n offset.lines += previous_offset.lines;\n offset.columns += previous_offset.columns;\n }\n const removed_offset = node_offsets.get(node);\n if (removed_offset) {\n offset.lines += removed_offset.lines;\n offset.columns += removed_offset.columns;\n }\n\n target_offsets.set(target, offset);\n}\n\nexport function applyBracketSpacing(\n root: Root,\n node: InlineArray | InlineTable,\n bracket_spacing: boolean = true\n) {\n // Can only add bracket spacing currently\n if (!bracket_spacing) return;\n if (!node.items.length) return;\n\n // Apply enter to node so that items are affected\n addOffset({ lines: 0, columns: 1 }, getEnter(root), node);\n\n // Apply exit to last node in items\n const last_item = last(node.items as Node[])!;\n addOffset({ lines: 0, columns: 1 }, getExit(root), last_item);\n}\n\nexport function applyTrailingComma(\n root: Root,\n node: InlineArray | InlineTable,\n trailing_commas: boolean = false\n) {\n // Can only add trailing comma currently\n if (!trailing_commas) return;\n if (!node.items.length) return;\n\n const last_item = last(node.items)!;\n last_item.comma = true;\n\n addOffset({ lines: 0, columns: 1 }, getExit(root), last_item);\n}\n\nexport function applyWrites(root: Node) {\n const enter = getEnter(root);\n const exit = getExit(root);\n\n const offset: { lines: number; columns: { [index: number]: number } } = {\n lines: 0,\n columns: {}\n };\n\n function shiftStart(node: Node) {\n node.loc.start.line += offset.lines;\n node.loc.start.column += offset.columns[node.loc.start.line] || 0;\n\n const entering = enter.get(node);\n if (entering) {\n offset.lines += entering.lines;\n offset.columns[node.loc.start.line] =\n (offset.columns[node.loc.start.line] || 0) + entering.columns;\n }\n }\n function shiftEnd(node: Node) {\n node.loc.end.line += offset.lines;\n node.loc.end.column += offset.columns[node.loc.end.line] || 0;\n\n const exiting = exit.get(node);\n if (exiting) {\n offset.lines += exiting.lines;\n offset.columns[node.loc.end.line] =\n (offset.columns[node.loc.end.line] || 0) + exiting.columns;\n }\n }\n const shiftLocation = {\n enter: shiftStart,\n exit: shiftEnd\n };\n\n traverse(root, {\n [NodeType.Document]: shiftLocation,\n [NodeType.Table]: shiftLocation,\n [NodeType.TableArray]: shiftLocation,\n [NodeType.InlineTable]: shiftLocation,\n [NodeType.InlineArray]: shiftLocation,\n\n [NodeType.InlineItem]: shiftLocation,\n [NodeType.TableKey]: shiftLocation,\n [NodeType.TableArrayKey]: shiftLocation,\n\n [NodeType.KeyValue]: {\n enter(node) {\n const start_line = node.loc.start.line + offset.lines;\n const key_offset = exit.get(node.key);\n node.equals += (offset.columns[start_line] || 0) + (key_offset ? 
key_offset.columns : 0);\n\n shiftStart(node);\n },\n exit: shiftEnd\n },\n\n [NodeType.Key]: shiftLocation,\n [NodeType.String]: shiftLocation,\n [NodeType.Integer]: shiftLocation,\n [NodeType.Float]: shiftLocation,\n [NodeType.Boolean]: shiftLocation,\n [NodeType.DateTime]: shiftLocation,\n [NodeType.Comment]: shiftLocation\n });\n\n enter_offsets.delete(root);\n exit_offsets.delete(root);\n}\n\nexport function shiftNode(\n node: Node,\n span: Span,\n options: { first_line_only?: boolean } = {}\n): Node {\n const { first_line_only = false } = options;\n const start_line = node.loc.start.line;\n const { lines, columns } = span;\n const move = (node: Node) => {\n if (!first_line_only || node.loc.start.line === start_line) {\n node.loc.start.column += columns;\n node.loc.end.column += columns;\n }\n node.loc.start.line += lines;\n node.loc.end.line += lines;\n };\n\n traverse(node, {\n [NodeType.Table]: move,\n [NodeType.TableKey]: move,\n [NodeType.TableArray]: move,\n [NodeType.TableArrayKey]: move,\n [NodeType.KeyValue](node) {\n move(node);\n node.equals += columns;\n },\n [NodeType.Key]: move,\n [NodeType.String]: move,\n [NodeType.Integer]: move,\n [NodeType.Float]: move,\n [NodeType.Boolean]: move,\n [NodeType.DateTime]: move,\n [NodeType.InlineArray]: move,\n [NodeType.InlineItem]: move,\n [NodeType.InlineTable]: move,\n [NodeType.Comment]: move\n });\n\n return node;\n}\n\nfunction perLine(array: InlineArray): boolean {\n if (!array.items.length) return false;\n\n const span = getSpan(array.loc);\n return span.lines > array.items.length;\n}\n\nfunction addOffset(offset: Span, offsets: Offsets, node: Node, from?: Node) {\n const previous_offset = offsets.get(from || node);\n if (previous_offset) {\n offset.lines += previous_offset.lines;\n offset.columns += previous_offset.columns;\n }\n\n offsets.set(node, offset);\n}\n","import {\n NodeType,\n Document,\n Table,\n TableKey,\n TableArray,\n TableArrayKey,\n Value,\n KeyValue,\n Key,\n String,\n Integer,\n Float,\n Boolean,\n DateTime,\n InlineArray,\n InlineItem,\n InlineTable,\n Comment\n} from './ast';\nimport { zero, cloneLocation, clonePosition } from './location';\nimport { shiftNode } from './writer';\n\nexport function generateDocument(): Document {\n return {\n type: NodeType.Document,\n loc: { start: zero(), end: zero() },\n items: []\n };\n}\n\nexport function generateTable(key: string[]): Table {\n const table_key = generateTableKey(key);\n\n return {\n type: NodeType.Table,\n loc: cloneLocation(table_key.loc),\n key: table_key,\n items: []\n };\n}\n\nexport function generateTableKey(key: string[]): TableKey {\n const raw = keyValueToRaw(key);\n\n return {\n type: NodeType.TableKey,\n loc: {\n start: zero(),\n end: { line: 1, column: raw.length + 2 }\n },\n item: {\n type: NodeType.Key,\n loc: {\n start: { line: 1, column: 1 },\n end: { line: 1, column: raw.length + 1 }\n },\n value: key,\n raw\n }\n };\n}\n\nexport function generateTableArray(key: string[]): TableArray {\n const table_array_key = generateTableArrayKey(key);\n\n return {\n type: NodeType.TableArray,\n loc: cloneLocation(table_array_key.loc),\n key: table_array_key,\n items: []\n };\n}\n\nexport function generateTableArrayKey(key: string[]): TableArrayKey {\n const raw = keyValueToRaw(key);\n\n return {\n type: NodeType.TableArrayKey,\n loc: {\n start: zero(),\n end: { line: 1, column: raw.length + 4 }\n },\n item: {\n type: NodeType.Key,\n loc: {\n start: { line: 1, column: 2 },\n end: { line: 1, column: raw.length + 2 }\n },\n value: key,\n raw\n }\n 
};\n}\n\nexport function generateKeyValue(key: string[], value: Value): KeyValue {\n const key_node = generateKey(key);\n const { column } = key_node.loc.end;\n\n const equals = column + 1;\n\n shiftNode(\n value,\n { lines: 0, columns: column + 3 - value.loc.start.column },\n { first_line_only: true }\n );\n\n return {\n type: NodeType.KeyValue,\n loc: {\n start: clonePosition(key_node.loc.start),\n end: clonePosition(value.loc.end)\n },\n key: key_node,\n equals,\n value\n };\n}\n\nconst IS_BARE_KEY = /[\\w,\\d,\\_,\\-]+/;\nfunction keyValueToRaw(value: string[]): string {\n return value.map(part => (IS_BARE_KEY.test(part) ? part : JSON.stringify(part))).join('.');\n}\n\nexport function generateKey(value: string[]): Key {\n const raw = keyValueToRaw(value);\n\n return {\n type: NodeType.Key,\n loc: { start: zero(), end: { line: 1, column: raw.length } },\n raw,\n value\n };\n}\n\nexport function generateString(value: string): String {\n const raw = JSON.stringify(value);\n\n return {\n type: NodeType.String,\n loc: { start: zero(), end: { line: 1, column: raw.length } },\n raw,\n value\n };\n}\n\nexport function generateInteger(value: number): Integer {\n const raw = value.toString();\n\n return {\n type: NodeType.Integer,\n loc: { start: zero(), end: { line: 1, column: raw.length } },\n raw,\n value\n };\n}\n\nexport function generateFloat(value: number): Float {\n const raw = value.toString();\n\n return {\n type: NodeType.Float,\n loc: { start: zero(), end: { line: 1, column: raw.length } },\n raw,\n value\n };\n}\n\nexport function generateBoolean(value: boolean): Boolean {\n return {\n type: NodeType.Boolean,\n loc: { start: zero(), end: { line: 1, column: value ? 4 : 5 } },\n value\n };\n}\n\nexport function generateDateTime(value: Date): DateTime {\n const raw = value.toISOString();\n\n return {\n type: NodeType.DateTime,\n loc: { start: zero(), end: { line: 1, column: raw.length } },\n raw,\n value\n };\n}\n\nexport function generateInlineArray(): InlineArray {\n return {\n type: NodeType.InlineArray,\n loc: { start: zero(), end: { line: 1, column: 2 } },\n items: []\n };\n}\n\nexport function generateInlineItem(item: KeyValue | Value): InlineItem {\n return {\n type: NodeType.InlineItem,\n loc: cloneLocation(item.loc),\n item,\n comma: false\n };\n}\n\nexport function generateInlineTable(): InlineTable {\n return {\n type: NodeType.InlineTable,\n loc: { start: zero(), end: { line: 1, column: 2 } },\n items: []\n };\n}\n\nexport function generateComment(comment: string): Comment {\n if (!comment.startsWith('#')) comment = `# ${comment}`;\n\n return {\n type: NodeType.Comment,\n loc: { start: zero(), end: { line: 1, column: comment.length } },\n raw: comment\n };\n}\n","import {\n KeyValue,\n Table,\n InlineTable,\n TableArray,\n InlineArray,\n isInlineTable,\n isInlineArray,\n isKeyValue,\n Document\n} from './ast';\nimport { generateTable, generateDocument, generateTableArray } from './generate';\nimport { insert, remove, applyWrites, shiftNode } from './writer';\n\nexport interface Format {\n printWidth?: number;\n tabWidth?: number;\n useTabs?: boolean;\n trailingComma?: boolean;\n bracketSpacing?: boolean;\n}\n\nexport function formatTopLevel(document: Document): Document {\n const move_to_top_level = document.items.filter(item => {\n if (!isKeyValue(item)) return false;\n\n const is_inline_table = isInlineTable(item.value);\n const is_inline_array =\n isInlineArray(item.value) &&\n item.value.items.length &&\n isInlineTable(item.value.items[0].item);\n\n return 
is_inline_table || is_inline_array;\n }) as KeyValue[];\n\n move_to_top_level.forEach(node => {\n remove(document, document, node);\n\n if (isInlineTable(node.value)) {\n insert(document, document, formatTable(node));\n } else {\n formatTableArray(node).forEach(table_array => {\n insert(document, document, table_array);\n });\n }\n });\n\n applyWrites(document);\n return document;\n}\n\nfunction formatTable(key_value: KeyValue): Table {\n const table = generateTable(key_value.key.value);\n\n for (const item of (key_value.value as InlineTable).items) {\n insert(table, table, item.item);\n }\n\n applyWrites(table);\n return table;\n}\n\nfunction formatTableArray(key_value: KeyValue): TableArray[] {\n const root = generateDocument();\n\n for (const inline_array_item of (key_value.value as InlineArray).items) {\n const table_array = generateTableArray(key_value.key.value);\n insert(root, root, table_array);\n\n for (const inline_table_item of (inline_array_item.item as InlineTable).items) {\n insert(root, table_array, inline_table_item.item);\n }\n }\n\n applyWrites(root);\n return root.items as TableArray[];\n}\n\nexport function formatPrintWidth(document: Document, format: Format): Document {\n // TODO\n return document;\n}\n\nexport function formatEmptyLines(document: Document): Document {\n let shift = 0;\n let previous = 0;\n for (const item of document.items) {\n if (previous === 0 && item.loc.start.line > 1) {\n // Remove leading newlines\n shift = 1 - item.loc.start.line;\n } else if (item.loc.start.line + shift > previous + 2) {\n shift += previous + 2 - (item.loc.start.line + shift);\n }\n\n shiftNode(item, {\n lines: shift,\n columns: 0\n });\n previous = item.loc.end.line;\n }\n\n return document;\n}\n","import { Value, KeyValue, Document, InlineArray, InlineTable } from './ast';\nimport {\n generateDocument,\n generateKeyValue,\n generateInlineItem,\n generateString,\n generateInteger,\n generateFloat,\n generateBoolean,\n generateDateTime,\n generateInlineArray,\n generateInlineTable\n} from './generate';\nimport { Format, formatTopLevel, formatPrintWidth, formatEmptyLines } from './format';\nimport { isObject, isString, isInteger, isFloat, isBoolean, isDate, pipe } from './utils';\nimport { insert, applyWrites, applyBracketSpacing, applyTrailingComma } from './writer';\n\nconst default_format = {\n printWidth: 80,\n trailingComma: false,\n bracketSpacing: true\n};\n\nexport default function parseJS(value: any, format: Format = {}): Document {\n format = Object.assign({}, default_format, format);\n value = toJSON(value);\n\n const document = generateDocument();\n for (const item of walkObject(value, format)) {\n insert(document, document, item);\n }\n applyWrites(document);\n\n // Heuristics:\n // 1. Top-level objects/arrays should be tables/table arrays\n // 2. 
Convert objects/arrays to tables/table arrays based on print width\n const formatted = pipe(\n document,\n formatTopLevel,\n document => formatPrintWidth(document, format),\n formatEmptyLines\n );\n\n return formatted;\n}\n\nfunction* walkObject(object: any, format: Format): IterableIterator {\n for (const key of Object.keys(object)) {\n yield generateKeyValue([key], walkValue(object[key], format));\n }\n}\n\nfunction walkValue(value: any, format: Format): Value {\n if (value == null) {\n throw new Error('\"null\" and \"undefined\" values are not supported');\n }\n\n if (isString(value)) {\n return generateString(value);\n } else if (isInteger(value)) {\n return generateInteger(value);\n } else if (isFloat(value)) {\n return generateFloat(value);\n } else if (isBoolean(value)) {\n return generateBoolean(value);\n } else if (isDate(value)) {\n return generateDateTime(value);\n } else if (Array.isArray(value)) {\n return walkInlineArray(value, format);\n } else {\n return walkInlineTable(value, format);\n }\n}\n\nfunction walkInlineArray(value: Array, format: Format): InlineArray {\n const inline_array = generateInlineArray();\n for (const element of value) {\n const item = walkValue(element, format);\n const inline_array_item = generateInlineItem(item);\n\n insert(inline_array, inline_array, inline_array_item);\n }\n applyBracketSpacing(inline_array, inline_array, format.bracketSpacing);\n applyTrailingComma(inline_array, inline_array, format.trailingComma);\n applyWrites(inline_array);\n\n return inline_array;\n}\n\nfunction walkInlineTable(value: object, format: Format): InlineTable | Value {\n value = toJSON(value);\n if (!isObject(value)) return walkValue(value, format);\n\n const inline_table = generateInlineTable();\n const items = [...walkObject(value, format)];\n for (const item of items) {\n const inline_table_item = generateInlineItem(item);\n\n insert(inline_table, inline_table, inline_table_item);\n }\n applyBracketSpacing(inline_table, inline_table, format.bracketSpacing);\n applyTrailingComma(inline_table, inline_table, format.trailingComma);\n applyWrites(inline_table);\n\n return inline_table;\n}\n\nfunction toJSON(value: any): any {\n return value && !isDate(value) && typeof value.toJSON === 'function' ? 
value.toJSON() : value;\n}\n","import { NodeType, AST } from './ast';\nimport traverse from './traverse';\nimport { Location } from './location';\nimport { SPACE } from './tokenizer';\n\nconst BY_NEW_LINE = /(\\r\\n|\\n)/g;\n\nexport default function toTOML(ast: AST, newline: string = '\\n'): string {\n const lines: string[] = [];\n\n traverse(ast, {\n [NodeType.TableKey](node) {\n const { start, end } = node.loc;\n\n write(lines, { start, end: { line: start.line, column: start.column + 1 } }, '[');\n write(lines, { start: { line: end.line, column: end.column - 1 }, end }, ']');\n },\n [NodeType.TableArrayKey](node) {\n const { start, end } = node.loc;\n\n write(lines, { start, end: { line: start.line, column: start.column + 2 } }, '[[');\n write(lines, { start: { line: end.line, column: end.column - 2 }, end }, ']]');\n },\n\n [NodeType.KeyValue](node) {\n const {\n start: { line }\n } = node.loc;\n write(\n lines,\n { start: { line, column: node.equals }, end: { line, column: node.equals + 1 } },\n '='\n );\n },\n [NodeType.Key](node) {\n write(lines, node.loc, node.raw);\n },\n\n [NodeType.String](node) {\n write(lines, node.loc, node.raw);\n },\n [NodeType.Integer](node) {\n write(lines, node.loc, node.raw);\n },\n [NodeType.Float](node) {\n write(lines, node.loc, node.raw);\n },\n [NodeType.Boolean](node) {\n write(lines, node.loc, node.value.toString());\n },\n [NodeType.DateTime](node) {\n write(lines, node.loc, node.raw);\n },\n\n [NodeType.InlineArray](node) {\n const { start, end } = node.loc;\n write(lines, { start, end: { line: start.line, column: start.column + 1 } }, '[');\n write(lines, { start: { line: end.line, column: end.column - 1 }, end }, ']');\n },\n\n [NodeType.InlineTable](node) {\n const { start, end } = node.loc;\n write(lines, { start, end: { line: start.line, column: start.column + 1 } }, '{');\n write(lines, { start: { line: end.line, column: end.column - 1 }, end }, '}');\n },\n [NodeType.InlineItem](node) {\n if (!node.comma) return;\n\n const start = node.loc.end;\n write(lines, { start, end: { line: start.line, column: start.column + 1 } }, ',');\n },\n\n [NodeType.Comment](node) {\n write(lines, node.loc, node.raw);\n }\n });\n\n return lines.join(newline) + newline;\n}\n\nfunction write(lines: string[], loc: Location, raw: string) {\n const raw_lines = raw.split(BY_NEW_LINE);\n const expected_lines = loc.end.line - loc.start.line + 1;\n\n if (raw_lines.length !== expected_lines) {\n throw new Error(\n `Mismatch between location and raw string, expected ${expected_lines} lines for \"${raw}\"`\n );\n }\n\n for (let i = loc.start.line; i <= loc.end.line; i++) {\n const line = getLine(lines, i);\n const is_start_line = i === loc.start.line;\n const is_end_line = i === loc.end.line;\n\n const before = is_start_line\n ? line.substr(0, loc.start.column).padEnd(loc.start.column, SPACE)\n : '';\n const after = is_end_line ? 
line.substr(loc.end.column) : '';\n\n lines[i - 1] = before + raw_lines[i - loc.start.line] + after;\n }\n}\n\nfunction getLine(lines: string[], index: number): string {\n if (!lines[index - 1]) {\n for (let i = 0; i < index; i++) {\n if (!lines[i]) lines[i] = '';\n }\n }\n\n return lines[index - 1];\n}\n","import { Value, NodeType, Node, AST, isInlineTable } from './ast';\nimport traverse from './traverse';\nimport { last, blank, isDate, has } from './utils';\nimport ParseError from './parse-error';\n\nexport default function toJS(ast: AST, input: string = ''): any {\n const result = blank();\n const tables: Set = new Set();\n const table_arrays: Set = new Set();\n const defined: Set = new Set();\n let active: any = result;\n let previous_active: any;\n let skip = false;\n\n traverse(ast, {\n [NodeType.Table](node) {\n const key = node.key.item.value;\n try {\n validateKey(result, key, node.type, { tables, table_arrays, defined });\n } catch (err) {\n throw new ParseError(input, node.key.loc.start, err.message);\n }\n\n const joined_key = joinKey(key);\n tables.add(joined_key);\n defined.add(joined_key);\n\n active = ensureTable(result, key);\n },\n\n [NodeType.TableArray](node) {\n const key = node.key.item.value;\n\n try {\n validateKey(result, key, node.type, { tables, table_arrays, defined });\n } catch (err) {\n throw new ParseError(input, node.key.loc.start, err.message);\n }\n\n const joined_key = joinKey(key);\n table_arrays.add(joined_key);\n defined.add(joined_key);\n\n active = ensureTableArray(result, key);\n },\n\n [NodeType.KeyValue]: {\n enter(node) {\n if (skip) return;\n\n const key = node.key.value;\n try {\n validateKey(active, key, node.type, { tables, table_arrays, defined });\n } catch (err) {\n throw new ParseError(input, node.key.loc.start, err.message);\n }\n\n const value = toValue(node.value);\n const target = key.length > 1 ? ensureTable(active, key.slice(0, -1)) : active;\n\n target[last(key)!] = value;\n defined.add(joinKey(key));\n\n if (isInlineTable(node.value)) {\n previous_active = active;\n active = value;\n }\n },\n exit(node) {\n if (isInlineTable(node.value)) {\n active = previous_active;\n }\n }\n },\n\n [NodeType.InlineTable]: {\n enter() {\n // Handled by toValue\n skip = true;\n },\n exit() {\n skip = false;\n }\n }\n });\n\n return result;\n}\n\nexport function toValue(node: Value): any {\n switch (node.type) {\n case NodeType.InlineTable:\n const result = blank();\n\n node.items.forEach(({ item }) => {\n const key = item.key.value;\n const value = toValue(item.value);\n\n const target = key.length > 1 ? ensureTable(result, key.slice(0, -1)) : result;\n target[last(key)!] = value;\n });\n\n return result;\n\n case NodeType.InlineArray:\n return node.items.map(item => toValue(item.item as Value));\n\n case NodeType.String:\n case NodeType.Integer:\n case NodeType.Float:\n case NodeType.Boolean:\n case NodeType.DateTime:\n return node.value;\n\n default:\n throw new Error(`Unrecognized value type \"${(node as Node).type}\"`);\n }\n}\n\nfunction validateKey(\n object: any,\n key: string[],\n type: NodeType.Table | NodeType.TableArray | NodeType.KeyValue,\n state: { tables: Set; table_arrays: Set; defined: Set }\n) {\n // 1. 
Cannot override primitive value\n let parts: string[] = [];\n let index = 0;\n for (const part of key) {\n parts.push(part);\n\n if (!has(object, part)) return;\n if (isPrimitive(object[part])) {\n throw new Error(`Invalid key, a value has already been defined for ${parts.join('.')}`);\n }\n\n const joined_parts = joinKey(parts);\n if (Array.isArray(object[part]) && !state.table_arrays.has(joined_parts)) {\n throw new Error(`Invalid key, cannot add to a static array at ${joined_parts}`);\n }\n\n const next_is_last = index++ < key.length - 1;\n object = Array.isArray(object[part]) && next_is_last ? last(object[part]) : object[part];\n }\n\n const joined_key = joinKey(key);\n\n // 2. Cannot override table\n if (object && type === NodeType.Table && state.defined.has(joined_key)) {\n throw new Error(`Invalid key, a table has already been defined named ${joined_key}`);\n }\n\n // 3. Cannot add table array to static array or table\n if (object && type === NodeType.TableArray && !state.table_arrays.has(joined_key)) {\n throw new Error(`Invalid key, cannot add an array of tables to a table at ${joined_key}`);\n }\n}\n\nfunction ensureTable(object: any, key: string[]): any {\n const target = ensure(object, key.slice(0, -1));\n const last_key = last(key)!;\n if (!target[last_key]) {\n target[last_key] = blank();\n }\n\n return target[last_key];\n}\n\nfunction ensureTableArray(object: any, key: string[]): any {\n const target = ensure(object, key.slice(0, -1));\n const last_key = last(key)!;\n if (!target[last_key]) {\n target[last_key] = [];\n }\n\n const next = blank();\n target[last(key)!].push(next);\n\n return next;\n}\n\nfunction ensure(object: any, keys: string[]): any {\n return keys.reduce((active, subkey) => {\n if (!active[subkey]) {\n active[subkey] = blank();\n }\n return Array.isArray(active[subkey]) ? 
last(active[subkey]) : active[subkey];\n }, object);\n}\n\nfunction isPrimitive(value: any) {\n return typeof value !== 'object' && !isDate(value);\n}\n\nfunction joinKey(key: string[]): string {\n return key.join('.');\n}\n","import { isObject, datesEqual, stableStringify, merge } from './utils';\nimport { Path } from './find-by-path';\n\nexport enum ChangeType {\n Add = 'Add',\n Edit = 'Edit',\n Remove = 'Remove',\n Move = 'Move',\n Rename = 'Rename'\n}\n\nexport interface Add {\n type: ChangeType.Add;\n path: Path;\n}\nexport function isAdd(change: Change): change is Add {\n return change.type === ChangeType.Add;\n}\n\nexport interface Edit {\n type: ChangeType.Edit;\n path: Path;\n}\nexport function isEdit(change: Change): change is Edit {\n return change.type === ChangeType.Edit;\n}\n\nexport interface Remove {\n type: ChangeType.Remove;\n path: Path;\n}\nexport function isRemove(change: Change): change is Remove {\n return change.type === ChangeType.Remove;\n}\n\nexport interface Move {\n type: ChangeType.Move;\n path: Path;\n from: number;\n to: number;\n}\nexport function isMove(change: Change): change is Move {\n return change.type === ChangeType.Move;\n}\n\nexport interface Rename {\n type: ChangeType.Rename;\n path: Path;\n from: string;\n to: string;\n}\nexport function isRename(change: Change): change is Rename {\n return change.type === ChangeType.Rename;\n}\n\nexport type Change = Add | Edit | Remove | Move | Rename;\n\nexport default function diff(before: any, after: any, path: Path = []): Change[] {\n if (before === after || datesEqual(before, after)) {\n return [];\n }\n\n if (Array.isArray(before) && Array.isArray(after)) {\n return compareArrays(before, after, path);\n } else if (isObject(before) && isObject(after)) {\n return compareObjects(before, after, path);\n } else {\n return [\n {\n type: ChangeType.Edit,\n path\n }\n ];\n }\n}\n\nfunction compareObjects(before: any, after: any, path: Path = []): Change[] {\n let changes: Change[] = [];\n\n // 1. Get keys and stable values\n const before_keys = Object.keys(before);\n const before_stable = before_keys.map(key => stableStringify(before[key]));\n const after_keys = Object.keys(after);\n const after_stable = after_keys.map(key => stableStringify(after[key]));\n\n // Check for rename by seeing if object is in both before and after\n // and that key is no longer used in after\n const isRename = (stable: string, search: string[]) => {\n const index = search.indexOf(stable);\n if (index < 0) return false;\n\n const before_key = before_keys[before_stable.indexOf(stable)];\n return !after_keys.includes(before_key);\n };\n\n // 2. Check for changes, rename, and removed\n before_keys.forEach((key, index) => {\n const sub_path = path.concat(key);\n if (after_keys.includes(key)) {\n merge(changes, diff(before[key], after[key], sub_path));\n } else if (isRename(before_stable[index], after_stable)) {\n const to = after_keys[after_stable.indexOf(before_stable[index])];\n changes.push({\n type: ChangeType.Rename,\n path,\n from: key,\n to\n });\n } else {\n changes.push({\n type: ChangeType.Remove,\n path: sub_path\n });\n }\n });\n\n // 3. Check for additions\n after_keys.forEach((key, index) => {\n if (!before_keys.includes(key) && !isRename(after_stable[index], before_stable)) {\n changes.push({\n type: ChangeType.Add,\n path: path.concat(key)\n });\n }\n });\n\n return changes;\n}\n\nfunction compareArrays(before: any[], after: any[], path: Path = []): Change[] {\n let changes: Change[] = [];\n\n // 1. 
Convert arrays to stable objects\n const before_stable = before.map(stableStringify);\n const after_stable = after.map(stableStringify);\n\n // 2. Step through after array making changes to before array as-needed\n after_stable.forEach((value, index) => {\n const overflow = index >= before_stable.length;\n\n // Check if items are the same\n if (!overflow && before_stable[index] === value) {\n return;\n }\n\n // Check if item has been moved -> shift into place\n const from = before_stable.indexOf(value, index + 1);\n if (!overflow && from > -1) {\n changes.push({\n type: ChangeType.Move,\n path,\n from,\n to: index\n });\n\n const move = before_stable.splice(from, 1);\n before_stable.splice(index, 0, ...move);\n\n return;\n }\n\n // Check if item is removed -> assume it's been edited and replace\n const removed = !after_stable.includes(before_stable[index]);\n if (!overflow && removed) {\n merge(changes, diff(before[index], after[index], path.concat(index)));\n before_stable[index] = value;\n\n return;\n }\n\n // Add as new item and shift existing\n changes.push({\n type: ChangeType.Add,\n path: path.concat(index)\n });\n before_stable.splice(index, 0, value);\n });\n\n // 3. Remove any remaining overflow items\n for (let i = after_stable.length; i < before_stable.length; i++) {\n changes.push({\n type: ChangeType.Remove,\n path: path.concat(i)\n });\n }\n\n return changes;\n}\n","import { Node, isKeyValue, isTable, isTableArray, hasItems, isInlineItem, hasItem } from './ast';\nimport { arraysEqual, stableStringify } from './utils';\n\nexport type Path = Array;\n\nexport default function findByPath(node: Node, path: Path): Node {\n if (!path.length) return node;\n\n if (isKeyValue(node)) {\n return findByPath(node.value, path);\n }\n\n const indexes: { [key: string]: number } = {};\n let found;\n if (hasItems(node)) {\n node.items.some((item, index) => {\n try {\n let key: Path = [];\n if (isKeyValue(item)) {\n key = item.key.value;\n } else if (isTable(item)) {\n key = item.key.item.value;\n } else if (isTableArray(item)) {\n key = item.key.item.value;\n\n const key_string = stableStringify(key);\n if (!indexes[key_string]) {\n indexes[key_string] = 0;\n }\n const array_index = indexes[key_string]++;\n\n key = key.concat(array_index);\n } else if (isInlineItem(item) && isKeyValue(item.item)) {\n key = item.item.key.value;\n } else if (isInlineItem(item)) {\n key = [index];\n }\n\n if (key.length && arraysEqual(key, path.slice(0, key.length))) {\n found = findByPath(item, path.slice(key.length));\n return true;\n } else {\n return false;\n }\n } catch (err) {\n return false;\n }\n });\n }\n\n if (!found) {\n throw new Error(`Could not find node at path ${path.join('.')}`);\n }\n\n return found;\n}\n\nexport function tryFindByPath(node: Node, path: Path): Node | undefined {\n try {\n return findByPath(node, path);\n } catch (err) {}\n}\n\nexport function findParent(node: Node, path: Path): Node {\n let parent_path = path;\n let parent;\n while (parent_path.length && !parent) {\n parent_path = parent_path.slice(0, -1);\n parent = tryFindByPath(node, parent_path);\n }\n\n if (!parent) {\n throw new Error(`Count not find parent node for path ${path.join('.')}`);\n }\n\n return parent;\n}\n","import parseTOML from './parse-toml';\nimport parseJS from './parse-js';\nimport toTOML from './to-toml';\nimport toJS from './to-js';\nimport { Format } from './format';\n\nexport function parse(value: string): any {\n return toJS(parseTOML(value), value);\n}\n\nexport function stringify(value: any, 
format?: Format): string {\n const document = parseJS(value, format);\n return toTOML(document.items);\n}\n\nexport { default as patch } from './patch';\n","import parseTOML from './parse-toml';\nimport parseJS from './parse-js';\nimport toJS from './to-js';\nimport toTOML from './to-toml';\nimport { Format } from './format';\nimport {\n isKeyValue,\n WithItems,\n KeyValue,\n isTable,\n Node,\n Document,\n isDocument,\n Block,\n NodeType,\n isTableArray,\n isInlineArray,\n hasItem,\n InlineItem\n} from './ast';\nimport diff, { Change, isAdd, isEdit, isRemove, isMove, isRename } from './diff';\nimport findByPath, { tryFindByPath, findParent } from './find-by-path';\nimport { last, isInteger } from './utils';\nimport { insert, replace, remove, applyWrites } from './writer';\n\nexport default function patch(existing: string, updated: any, format?: Format): string {\n const existing_ast = parseTOML(existing);\n const items = [...existing_ast];\n\n const existing_js = toJS(items);\n const existing_document: Document = {\n type: NodeType.Document,\n loc: { start: { line: 1, column: 0 }, end: { line: 1, column: 0 } },\n items\n };\n\n const updated_document = parseJS(updated, format);\n const changes = diff(existing_js, updated);\n\n const patched_document = applyChanges(existing_document, updated_document, changes);\n\n return toTOML(patched_document.items);\n}\n\nfunction applyChanges(original: Document, updated: Document, changes: Change[]): Document {\n // Potential Changes:\n //\n // Add: Add key-value to object, add item to array\n // Edit: Change in value\n // Remove: Remove key-value from object, remove item from array\n // Move: Move item in array\n // Rename: Rename key in key-value\n //\n // Special consideration, inline comments need to move as-needed\n\n changes.forEach(change => {\n if (isAdd(change)) {\n const child = findByPath(updated, change.path);\n const parent_path = change.path.slice(0, -1);\n let index = last(change.path)! 
as number;\n\n let is_table_array = isTableArray(child);\n if (isInteger(index) && !parent_path.some(isInteger)) {\n const sibling = tryFindByPath(original, parent_path.concat(0));\n if (sibling && isTableArray(sibling)) {\n is_table_array = true;\n }\n }\n\n let parent: Node;\n if (isTable(child)) {\n parent = original;\n } else if (is_table_array) {\n parent = original;\n\n // The index needs to be updated to top-level items\n // to properly account for other items, comments, and nesting\n const document = original as Document;\n const before = tryFindByPath(document, parent_path.concat(index - 1)) as Block | undefined;\n const after = tryFindByPath(document, parent_path.concat(index)) as Block | undefined;\n if (after) {\n index = document.items.indexOf(after);\n } else if (before) {\n index = document.items.indexOf(before) + 1;\n } else {\n index = document.items.length;\n }\n } else {\n parent = findParent(original, change.path);\n if (isKeyValue(parent)) parent = parent.value;\n }\n\n if (isTableArray(parent) || isInlineArray(parent) || isDocument(parent)) {\n insert(original, parent, child, index);\n } else {\n insert(original, parent, child);\n }\n } else if (isEdit(change)) {\n let existing = findByPath(original, change.path);\n let replacement = findByPath(updated, change.path);\n let parent;\n\n if (isKeyValue(existing) && isKeyValue(replacement)) {\n // Edit for key-value means value changes\n parent = existing;\n existing = existing.value;\n replacement = replacement.value;\n } else {\n parent = findParent(original, change.path);\n }\n\n replace(original, parent, existing, replacement);\n } else if (isRemove(change)) {\n let parent = findParent(original, change.path);\n if (isKeyValue(parent)) parent = parent.value;\n\n const node = findByPath(original, change.path);\n\n remove(original, parent, node);\n } else if (isMove(change)) {\n let parent = findByPath(original, change.path);\n if (hasItem(parent)) parent = parent.item;\n if (isKeyValue(parent)) parent = parent.value;\n\n const node = (parent as WithItems).items[change.from];\n\n remove(original, parent, node);\n insert(original, parent, node, change.to);\n } else if (isRename(change)) {\n let parent = findByPath(original, change.path.concat(change.from)) as\n | KeyValue\n | InlineItem;\n let replacement = findByPath(updated, change.path.concat(change.to)) as\n | KeyValue\n | InlineItem;\n\n if (hasItem(parent)) parent = parent.item;\n if (hasItem(replacement)) replacement = replacement.item;\n\n replace(original, parent, parent.key, replacement.key);\n }\n });\n\n applyWrites(original);\n return original;\n}\n"],"names":["NodeType","TokenType","isDocument","node","type","Document","isTable","Table","isTableArray","TableArray","isKeyValue","KeyValue","isInlineArray","InlineArray","isInlineItem","InlineItem","isInlineTable","InlineTable","isComment","Comment","hasItems","hasItem","TableKey","isTableKey","TableArrayKey","isTableArrayKey","Cursor","[object 
Object]","iterator","this","index","value","undefined","done","peeked","result","next","Symbol","getSpan","location","lines","end","line","start","columns","column","findPosition","input","Array","isArray","findLines","findIndex","line_index","BY_NEW_LINE","indexes","match","exec","push","length","clonePosition","position","cloneLocation","ParseError","Error","message","error_message","substr","getLine","pointer","count","character","repeat","whitespace","super","IS_WHITESPACE","IS_NEW_LINE","DOUBLE_QUOTE","SINGLE_QUOTE","SPACE","ESCAPE","IS_VALID_LEADING_CHARACTER","tokenize","cursor","locate","createLocate","test","specialCharacter","Bracket","Curly","Equal","Comma","Dot","comment","multiline_char","checkThree","multiline","string","raw","loc","peek","quotes","Literal","double_quoted","single_quoted","isFinished","next_item","current","check","last","values","blank","Object","create","isInteger","isDate","prototype","toString","call","isObject","has","object","key","hasOwnProperty","pipe","fns","reduce","fn","stableStringify","keys","sort","map","JSON","stringify","join","merge","target","original_length","added_length","i","TRIPLE_DOUBLE_QUOTE","TRIPLE_SINGLE_QUOTE","LF","CRLF","IS_CRLF","IS_LF","IS_LEADING_NEW_LINE","IS_LINE_ENDING_BACKSLASH","parseString","startsWith","trim","trimLeadingWhitespace","lineEndingBackslash","escapeNewLines","unescape","escaped","json_escaped","replace","code_point","parseInt","as_string","String","fromCodePoint","parse","TRUE","FALSE","HAS_E","IS_DIVIDER","IS_INF","IS_NAN","IS_HEX","IS_OCTAL","IS_BINARY","IS_FULL_DATE","IS_FULL_TIME","parseTOML","tokens","walkBlock","is_table","item","Key","dot","before","after","items","table","equals","comments","walkValue","keyValue","Boolean","boolean","Date","local_date","toISOString","split","DateTime","datetime","Infinity","Number","Float","float","Integer","radix","integer","previous","comma","inline_item","inlineTable","inline_array","additional_comments","inlineArray","traverse","ast","visitor","traverseArray","array","parent","traverseNode","visit","enter","exit","enter_offsets","WeakMap","getEnter","root","set","get","exit_offsets","getExit","existing","replacement","indexOf","splice","shiftNode","existing_span","replacement_span","addOffset","insert","child","shift","offset","is_last","leading_comma","trailing_comma","last_comma","use_new_line","perLine","leading_lines","skip_comma","skip_bracket","child_span","insertInline","use_first_line","is_block","insertOnNewLine","previous_offset","delete","remove","removed_span","is_inline","previous_on_same_line","next_on_sameLine","keep_line","target_offsets","node_offsets","removed_offset","applyBracketSpacing","bracket_spacing","last_item","applyTrailingComma","trailing_commas","applyWrites","shiftStart","entering","shiftEnd","exiting","shiftLocation","start_line","key_offset","span","options","first_line_only","move","offsets","from","generateDocument","generateTable","table_key","keyValueToRaw","generateTableKey","generateTableArray","table_array_key","generateTableArrayKey","generateKeyValue","key_node","generateKey","IS_BARE_KEY","part","generateInlineItem","formatTopLevel","document","filter","is_inline_table","is_inline_array","forEach","key_value","formatTable","inline_array_item","table_array","inline_table_item","formatTableArray","formatEmptyLines","default_format","printWidth","trailingComma","bracketSpacing","parseJS","format","assign","toJSON","walkObject","formatPrintWidth","isString","generateString","generateInteger","isFloat","generateFloat","isBoo
lean","generateBoolean","generateDateTime","element","walkInlineArray","inline_table","walkInlineTable","toTOML","newline","write","raw_lines","expected_lines","is_start_line","is_end_line","padEnd","toJS","tables","Set","table_arrays","defined","previous_active","active","skip","validateKey","err","joined_key","joinKey","add","ensureTable","ensure","slice","last_key","ensureTableArray","toValue","state","parts","joined_parts","next_is_last","subkey","ChangeType","diff","path","b","a","changes","before_stable","after_stable","overflow","Move","to","removed","includes","concat","Add","Remove","compareArrays","before_keys","after_keys","isRename","stable","search","before_key","sub_path","Rename","compareObjects","Edit","findByPath","found","some","key_string","array_index","arraysEqual","tryFindByPath","findParent","parent_path","updated","existing_js","original","change","isAdd","is_table_array","sibling","isEdit","isRemove","isMove","applyChanges"],"mappings":"iMAEA,IAAYA,ECEAC,WD4BIC,EAAWC,GACzB,OAAOA,EAAKC,OAASJ,EAASK,kBAqBhBC,EAAQH,GACtB,OAAOA,EAAKC,OAASJ,EAASO,eAuChBC,EAAaL,GAC3B,OAAOA,EAAKC,OAASJ,EAASS,oBAoChBC,EAAWP,GACzB,OAAOA,EAAKC,OAASJ,EAASW,kBAiGhBC,EAAcT,GAC5B,OAAOA,EAAKC,OAASJ,EAASa,qBAiBhBC,EAAaX,GAC3B,OAAOA,EAAKC,OAASJ,EAASe,oBAYhBC,EAAcb,GAC5B,OAAOA,EAAKC,OAASJ,EAASiB,qBAyBhBC,EAAUf,GACxB,OAAOA,EAAKC,OAASJ,EAASmB,iBAUhBC,EAASjB,GACvB,OACED,EAAWC,IACXG,EAAQH,IACRK,EAAaL,IACba,EAAcb,IACdS,EAAcT,YAOFkB,EAAQlB,GACtB,gBA/OyBA,GACzB,OAAOA,EAAKC,OAASJ,EAASsB,SA8OvBC,CAAWpB,aAxMYA,GAC9B,OAAOA,EAAKC,OAASJ,EAASwB,cAuMHC,CAAgBtB,IAASW,EAAaX,IArTnE,SAAYH,GACVA,sBACAA,gBACAA,sBACAA,0BACAA,gCACAA,sBACAA,YACAA,kBACAA,oBACAA,gBACAA,oBACAA,sBACAA,4BACAA,0BACAA,4BACAA,oBAhBF,CAAYA,IAAAA,aEES0B,EAOnBC,YAAYC,GACVC,KAAKD,SAAWA,EAChBC,KAAKC,OAAS,EACdD,KAAKE,WAAQC,EACbH,KAAKI,MAAO,EACZJ,KAAKK,OAAS,KAGhBP,OACE,GAAIE,KAAKI,KAAM,OAAOA,IAEtB,MAAME,EAASN,KAAKK,QAAUL,KAAKD,SAASQ,OAO5C,OALAP,KAAKC,OAAS,EACdD,KAAKE,MAAQI,EAAOJ,MACpBF,KAAKI,KAAOE,EAAOF,KACnBJ,KAAKK,OAAS,KAEPC,EAGTR,OACE,OAAIE,KAAKI,KAAaA,IAClBJ,KAAKK,OAAeL,KAAKK,QAE7BL,KAAKK,OAASL,KAAKD,SAASQ,OACrBP,KAAKK,QAGdP,CAACU,OAAOT,YACN,OAAOC,MAIX,SAASI,IACP,MAAO,CAAEF,WAAOC,EAAWC,MAAM,YC9BnBK,EAAQC,GACtB,MAAO,CACLC,MAAOD,EAASE,IAAIC,KAAOH,EAASI,MAAMD,KAAO,EACjDE,QAASL,EAASE,IAAII,OAASN,EAASI,MAAME,iBAgBlCC,EAAaC,EAA0BjB,GAarD,MAAMU,EAAQQ,MAAMC,QAAQF,GAASA,EAAQG,EAAUH,GACjDL,EAAOF,EAAMW,UAAUC,GAAcA,GAActB,GAAS,EAGlE,MAAO,CAAEY,KAAAA,EAAMG,OAFAf,GAASU,EAAME,EAAO,GAAK,GAAK,aAajCQ,EAAUH,GAExB,MAAMM,EAAc,aACdC,EAAoB,GAE1B,IAAIC,EACJ,KAA4C,OAApCA,EAAQF,EAAYG,KAAKT,KAC/BO,EAAQG,KAAKF,EAAMzB,OAIrB,OAFAwB,EAAQG,KAAKV,EAAMW,OAAS,GAErBJ,WAGOK,EAAcC,GAC5B,MAAO,CAAElB,KAAMkB,EAASlB,KAAMG,OAAQe,EAASf,iBAGjCgB,EAActB,GAC5B,MAAO,CAAEI,MAAOgB,EAAcpB,EAASI,OAAQF,IAAKkB,EAAcpB,EAASE,YChFxDqB,UAAmBC,MAItCpC,YAAYoB,EAAea,EAAoBI,GAC7C,IAAIC,yBAAuCL,EAASlB,SAASkB,EAASf,OAAS,QAE/E,GAAIE,EAAO,CACT,MAAML,WD6CYK,EAAea,GACrC,MAAMpB,EAAQU,EAAUH,GAClBJ,EAAQH,EAAMoB,EAASlB,KAAO,IAAM,EACpCD,EAAMD,EAAMoB,EAASlB,KAAO,IAAMK,EAAMW,OAE9C,OAAOX,EAAMmB,OAAOvB,EAAOF,EAAME,GClDhBwB,CAAQpB,EAAOa,GACtBQ,KAiBZ,SAAoBC,EAAeC,EAAoB,KACrD,OAAOA,EAAUC,OAAOF,GAlBDG,CAAWZ,EAASf,WAEnCH,IAAMuB,MAAoBvB,MAAS0B,OAIzCK,MAFAR,GAAiBD,GAIjBnC,KAAKa,KAAOkB,EAASlB,KACrBb,KAAKgB,OAASe,EAASf,SHhB3B,SAAY5C,GACVA,oBACAA,gBACAA,gBACAA,gBACAA,YACAA,oBACAA,oBAPF,CAAYA,IAAAA,OAgBL,MAAMyE,EAAgB,KAChBC,EAAc,YACdC,EAAe,IACfC,EAAe,IACfC,EAAQ,IACRC,EAAS,KAEhBC,EAA6B,kCAElBC,EAASlC,GACxB,MAAMmC,EAAS,IAAIxD,EAAgBqB,EC7BtBV,OAAOT,aD8BpBsD,EAAO9C,OAEP,MAAM+C,WETqBpC,GAC3B,MAAMP,EAAQU,EAAUH,GAExB,MAAO,CAACJ,EAAeF,KACd,CACLE,
MAAOG,EAAaN,EAAOG,GAC3BF,IAAKK,EAAaN,EAAOC,KFGd2C,CAAarC,GAE5B,MAAQmC,EAAOjD,MAAM,CACnB,GAAIyC,EAAcW,KAAKH,EAAOnD,aAEvB,GAAqB,MAAjBmD,EAAOnD,OAAkC,MAAjBmD,EAAOnD,YAElCuD,EAAiBJ,EAAQC,EAAQlF,EAAUsF,cAC5C,GAAqB,MAAjBL,EAAOnD,OAAkC,MAAjBmD,EAAOnD,YAClCuD,EAAiBJ,EAAQC,EAAQlF,EAAUuF,YAC5C,GAAqB,MAAjBN,EAAOnD,YACVuD,EAAiBJ,EAAQC,EAAQlF,EAAUwF,YAC5C,GAAqB,MAAjBP,EAAOnD,YACVuD,EAAiBJ,EAAQC,EAAQlF,EAAUyF,YAC5C,GAAqB,MAAjBR,EAAOnD,YACVuD,EAAiBJ,EAAQC,EAAQlF,EAAU0F,UAC5C,GAAqB,MAAjBT,EAAOnD,YAEV6D,EAAQV,EAAQC,OACjB,CACL,MAAMU,EACJC,EAAW/C,EAAOmC,EAAOpD,MAAO+C,IAChCiB,EAAW/C,EAAOmC,EAAOpD,MAAO8C,GAE9BiB,QAEIE,EAAUb,EAAQC,EAAQU,EAAgB9C,SAE1CiD,EAAOd,EAAQC,EAAQpC,GAIjCmC,EAAO9C,QAIX,SAASkD,EAAiBJ,EAAwBC,EAAiB/E,GACjE,MAAO,CAAEA,KAAAA,EAAM6F,IAAKf,EAAOnD,MAAQmE,IAAKf,EAAOD,EAAOpD,MAAOoD,EAAOpD,MAAQ,IAG9E,SAAS8D,EAAQV,EAAwBC,GACvC,MAAMxC,EAAQuC,EAAOpD,MACrB,IAAImE,EAAMf,EAAOnD,MACjB,MAAQmD,EAAOiB,OAAOlE,OAAS0C,EAAYU,KAAKH,EAAOiB,OAAOpE,QAC5DmD,EAAO9C,OACP6D,GAAOf,EAAOnD,MAKhB,MAAO,CACL3B,KAAMH,EAAUkB,QAChB8E,IAAAA,EACAC,IAAKf,EAAOxC,EAAOuC,EAAOpD,MAAQ,IAItC,SAASiE,EACPb,EACAC,EACAU,EACA9C,GAEA,MAAMJ,EAAQuC,EAAOpD,MACrB,IAAIsE,EAASP,EAAiBA,EAAiBA,EAC3CI,EAAMG,EAOV,IAJAlB,EAAO9C,OACP8C,EAAO9C,OACP8C,EAAO9C,QAEC8C,EAAOjD,OAAS6D,EAAW/C,EAAOmC,EAAOpD,MAAO+D,IACtDI,GAAOf,EAAOnD,MACdmD,EAAO9C,OAGT,GAAI8C,EAAOjD,KACT,MAAM,IAAI6B,EACRf,EACAD,EAAaC,EAAOmC,EAAOpD,kDACgBsE,0BAS/C,OALAH,GAAOG,EAEPlB,EAAO9C,OACP8C,EAAO9C,OAEA,CACLhC,KAAMH,EAAUoG,QAChBJ,IAAAA,EACAC,IAAKf,EAAOxC,EAAOuC,EAAOpD,MAAQ,IAItC,SAASkE,EAAOd,EAAwBC,EAAiBpC,GAsBvD,IAAKiC,EAA2BK,KAAKH,EAAOnD,OAC1C,MAAM,IAAI+B,EACRf,EACAD,EAAaC,EAAOmC,EAAOpD,iCACDoD,EAAOnD,mDAIrC,MAAMY,EAAQuC,EAAOpD,MACrB,IAAImE,EAAMf,EAAOnD,MACbuE,EAAgBpB,EAAOnD,QAAU6C,EACjC2B,EAAgBrB,EAAOnD,QAAU8C,EAErC,MAAM2B,EAActB,IAClB,GAAIA,EAAOiB,OAAOlE,KAAM,OAAO,EAC/B,MAAMwE,EAAYvB,EAAOiB,OAAOpE,MAEhC,QACIuE,GAAiBC,KAClB7B,EAAcW,KAAKoB,IACJ,MAAdA,GACc,MAAdA,GACc,MAAdA,GACc,MAAdA,GACc,MAAdA,IAIN,MAAQvB,EAAOjD,OAASuE,EAAWtB,KACjCA,EAAO9C,OAEH8C,EAAOnD,QAAU6C,IAAc0B,GAAiBA,GAChDpB,EAAOnD,QAAU8C,GAAiByB,IAAeC,GAAiBA,GAEtEN,GAAOf,EAAOnD,OAEVmD,EAAOiB,OAAOlE,OARwB,CAS1C,IAAIwE,EAAYvB,EAAOiB,OAAOpE,MAI1BuE,GAAiBpB,EAAOnD,QAAUgD,IAChC0B,IAAc7B,GAChBqB,GAAOrB,EACPM,EAAO9C,QACEqE,IAAc1B,IACvBkB,GAAOlB,EACPG,EAAO9C,SAKb,GAAIkE,GAAiBC,EACnB,MAAM,IAAIzC,EACRf,EACAD,EAAaC,EAAOJ,oCACa2D,EAAgB1B,EAAeC,KAIpE,MAAO,CACLzE,KAAMH,EAAUoG,QAChBJ,IAAAA,EACAC,IAAKf,EAAOxC,EAAOuC,EAAOpD,MAAQ,IAItC,SAASgE,EAAW/C,EAAe2D,EAAiBC,GAClD,OACE5D,EAAM2D,KAAaC,GACnB5D,EAAM2D,EAAU,KAAOC,GACvB5D,EAAM2D,EAAU,KAAOC,GACvBA,WIhOYC,EAAaC,GAC3B,OAAOA,EAAOA,EAAOnD,OAAS,YAKhBoD,IACd,OAAOC,OAAOC,OAAO,eAOPC,EAAUlF,GACxB,MAAwB,iBAAVA,GAAsBA,EAAQ,GAAM,WAWpCmF,EAAOnF,GACrB,MAAiD,kBAA1CgF,OAAOI,UAAUC,SAASC,KAAKtF,YAGxBuF,EAASvF,GACvB,OAAOA,GAA0B,iBAAVA,IAAuBmF,EAAOnF,KAAWiB,MAAMC,QAAQlB,YAOhEwF,EAAIC,EAAaC,GAC/B,OAAOV,OAAOI,UAAUO,eAAeL,KAAKG,EAAQC,YAiBtCE,EAAa5F,KAAkB6F,GAC7C,OAAOA,EAAIC,OAAO,CAAC9F,EAAO+F,IAAOA,EAAG/F,GAAQA,YAG9BgG,EAAgBP,GAC9B,GAAIF,EAASE,GAAS,CAKpB,UAJmBT,OAAOiB,KAAKR,GAC5BS,OACAC,IAAIT,MAAUU,KAAKC,UAAUX,MAAQM,EAAgBP,EAAOC,OAEzCY,KAAK,QACtB,OAAIrF,MAAMC,QAAQuE,OACZA,EAAOU,IAAIH,GAAiBM,KAAK,QAErCF,KAAKC,UAAUZ,YAIVc,EAAcC,EAAkB1B,GAG9C,MAAM2B,EAAkBD,EAAO7E,OACzB+E,EAAe5B,EAAOnD,OAC5B6E,EAAO7E,OAAS8E,EAAkBC,EAElC,IAAK,IAAIC,EAAI,EAAGA,EAAID,EAAcC,IAChCH,EAAOC,EAAkBE,GAAK7B,EAAO6B,GC/EzC,MAAMC,EAAsB,MACtBC,EAAsB,MACtBC,EAAK,MACLC,EAAO,SACPC,EAAU,QACVC,EAAQ,MACRC,EAAsB,aACtBC,EAA2B,6BAEjBC,EAAYlD,GAC1B,OAAIA,EAAImD,WAAWR,GACVjB,EACL0B,EAAKpD,EAAK,GACVqD,IAEOrD,EAAImD,WAAWvE,GACjBwE,EAAKpD,EAAK,GACRA,EAAImD,WAAWT,GACjBhB,EACL0B,
EAAKpD,EAAK,GACVqD,GACAC,GACAC,GACAC,GAEOxD,EAAImD,WAAWxE,GACjB+C,EACL0B,EAAKpD,EAAK,GACVwD,GAGKxD,WAIKwD,EAASC,GAGvB,MACMC,EAAeD,EAAQE,QADP,qBAC8B7H,IAClD,MAAM8H,EAAaC,SAAS/H,EAAM6H,QAAQ,MAAO,IAAK,IAChDG,EAAYC,OAAOC,cAAcJ,GAEvC,OAAOR,EAAKlB,KAAKC,UAAU2B,GAAY,KAGzC,OAAO5B,KAAK+B,UAAUP,MAOxB,SAASN,EAAKtH,EAAesC,GAC3B,OAAOtC,EAAMmC,OAAOG,EAAOtC,EAAM2B,OAAiB,EAARW,GAG5C,SAASiF,GAAsBvH,GAC7B,OAAOkH,EAAoB5D,KAAKtD,GAASA,EAAMmC,OAAO,GAAKnC,EAG7D,SAASyH,GAAezH,GACtB,OAAOA,EAAM6H,QAAQb,EAASD,GAAMc,QAAQZ,EAAOH,GAGrD,SAASU,GAAoBxH,GAC3B,OAAOA,EAAM6H,QAAQV,EAA0B,ICzCjD,MAAMiB,GAAO,OACPC,GAAQ,QACRC,GAAQ,KACRC,GAAa,MACbC,GAAS,MACTC,GAAS,MACTC,GAAS,MACTC,GAAW,MACXC,GAAY,MACLC,GAAe,0BACfC,GAAe,mCAEHC,GAAU/H,GACjC,MAAMgI,EAAS9F,EAASlC,GAClBmC,EAAS,IAAIxD,EAAOqJ,GAE1B,MAAQ7F,EAAO9C,OAAOH,YACb+I,GAAU9F,EAAQnC,GAI7B,SAAUiI,GAAU9F,EAAuBnC,GACzC,GAAImC,EAAOnD,MAAO3B,OAASH,EAAUkB,cAC7ByE,GAAQV,QACT,GAAIA,EAAOnD,MAAO3B,OAASH,EAAUsF,cAyD9C,SAAeL,EAAuBnC,GAgBpC,MAAM3C,EACH8E,EAAOiB,OAAOlE,MAAQiD,EAAOiB,OAAOpE,MAAO3B,OAASH,EAAUsF,QAE3DvF,EAASO,MADTP,EAASS,WAETwK,EAAW7K,IAASJ,EAASO,MAEnC,GAAI0K,GAAkC,MAAtB/F,EAAOnD,MAAOkE,IAC5B,MAAM,IAAInC,EACRf,EACAmC,EAAOnD,MAAOmE,IAAIvD,2CACmBuC,EAAOnD,MAAOkE,OAGvD,IAAKgF,IAAmC,MAAtB/F,EAAOnD,MAAOkE,KAA4C,MAA7Bf,EAAOiB,OAAOpE,MAAOkE,KAClE,MAAM,IAAInC,EACRf,EACAmC,EAAOnD,MAAOmE,IAAIvD,sDAC8BuC,EAAOnD,MAAOkE,IAAMf,EAAOiB,OAAOpE,MAAOkE,OAK7F,MAAMwB,EAAMwD,EACP,CACC7K,KAAMJ,EAASsB,SACf4E,IAAKhB,EAAOnD,MAAOmE,KAEpB,CACC9F,KAAMJ,EAASwB,cACf0E,IAAKhB,EAAOnD,MAAOmE,KAIzBhB,EAAO9C,OACHhC,IAASJ,EAASS,YAAYyE,EAAO9C,OAEzC,GAAI8C,EAAOjD,KACT,MAAM,IAAI6B,EAAWf,EAAO0E,EAAIvB,IAAKvD,MAAO,2CAG9C8E,EAAIyD,KAAO,CACT9K,KAAMJ,EAASmL,IACfjF,IAAKrC,EAAcqB,EAAOnD,MAAOmE,KACjCD,IAAKf,EAAOnD,MAAOkE,IACnBlE,MAAO,CAACoH,EAAYjE,EAAOnD,MAAOkE,OAGpC,MAAQf,EAAOiB,OAAOlE,MAAQiD,EAAOiB,OAAOpE,MAAO3B,OAASH,EAAU0F,KAAK,CACzET,EAAO9C,OACP,MAAMgJ,EAAMlG,EAAOnD,MAEnBmD,EAAO9C,OACP,MAAMiJ,EAAS,IAAI9G,OAAO6G,EAAIlF,IAAIvD,MAAME,OAAS4E,EAAIyD,KAAKhF,IAAIzD,IAAII,QAC5DyI,EAAQ,IAAI/G,OAAOW,EAAOnD,MAAOmE,IAAIvD,MAAME,OAASuI,EAAIlF,IAAIzD,IAAII,QAEtE4E,EAAIyD,KAAKhF,IAAIzD,IAAMyC,EAAOnD,MAAOmE,IAAIzD,IACrCgF,EAAIyD,KAAKjF,QAAUoF,KAAUC,IAAQpG,EAAOnD,MAAOkE,MACnDwB,EAAIyD,KAAKnJ,MAAM0B,KAAK0F,EAAYjE,EAAOnD,MAAOkE,MAKhD,GAFAf,EAAO9C,OAEH6I,IAAa/F,EAAOjD,MAA8B,MAAtBiD,EAAOnD,MAAOkE,KAC5C,MAAM,IAAInC,EACRf,EACAmC,EAAOjD,KAAOwF,EAAIyD,KAAKhF,IAAIzD,IAAMyC,EAAOnD,MAAOmE,IAAIvD,2CACduC,EAAOjD,KAAO,cAAgBiD,EAAOnD,MAAOkE,OAGrF,IACGgF,IACA/F,EAAOjD,MACNiD,EAAOiB,OAAOlE,MACQ,MAAtBiD,EAAOnD,MAAOkE,KACe,MAA7Bf,EAAOiB,OAAOpE,MAAOkE,KAEvB,MAAM,IAAInC,EACRf,EACAmC,EAAOjD,MAAQiD,EAAOiB,OAAOlE,KAAOwF,EAAIyD,KAAKhF,IAAIzD,IAAMyC,EAAOnD,MAAOmE,IAAIvD,sDAEvEuC,EAAOjD,MAAQiD,EAAOiB,OAAOlE,KACzB,cACAiD,EAAOnD,MAAOkE,IAAMf,EAAOiB,OAAOpE,MAAOkE,OAM9CgF,GAAU/F,EAAO9C,OACtBqF,EAAIvB,IAAKzD,IAAMyC,EAAOnD,MAAOmE,IAAIzD,IAGjC,IAAI8I,EAAmC,GACvC,MAAQrG,EAAOiB,OAAOlE,MAAQiD,EAAOiB,OAAOpE,MAAO3B,OAASH,EAAUsF,SACpEL,EAAO9C,OACPkG,EAAMiD,EAAO,IAAIP,GAAU9F,EAAQnC,KAGrC,MAAO,CACL3C,KAAM6K,EAAWjL,EAASO,MAAQP,EAASS,WAC3CyF,IAAK,CACHvD,MAAOgB,EAAc8D,EAAIvB,IAAKvD,OAC9BF,IACIkB,EADC4H,EAAM7H,OACO6H,EAAMA,EAAM7H,OAAS,GAAGwC,IAAIzD,IAC5BgF,EAAIvB,IAAKzD,MAE7BgF,IAAKA,EACL8D,MAAAA,GAnLMC,CAAMtG,EAAQnC,OACf,CAAA,GAAImC,EAAOnD,MAAO3B,OAASH,EAAUoG,QAG1C,MAAM,IAAIvC,EACRf,EACAmC,EAAOnD,MAAOmE,IAAIvD,2BACGuC,EAAOnD,MAAO3B,qDAgLzC,SAAkB8E,EAAuBnC,GAOvC,MAAM0E,EAAW,CACfrH,KAAMJ,EAASmL,IACfjF,IAAKrC,EAAcqB,EAAOnD,MAAOmE,KACjCD,IAAKf,EAAOnD,MAAOkE,IACnBlE,MAAO,CAACoH,EAAYjE,EAAOnD,MAAOkE,OAGpC,MAAQf,EAAOiB,OAAOlE,MAAQiD,EAAOiB,OAAOpE,MAAO3B,OAASH,EAAU0F,KACpET,EAAO9C,OACP8C,EAAO
9C,OAEPqF,EAAIvB,IAAIzD,IAAMyC,EAAOnD,MAAOmE,IAAIzD,IAChCgF,EAAIxB,SAAWf,EAAOnD,MAAOkE,MAC7BwB,EAAI1F,MAAM0B,KAAK0F,EAAYjE,EAAOnD,MAAOkE,MAK3C,GAFAf,EAAO9C,OAEH8C,EAAOjD,MAAQiD,EAAOnD,MAAO3B,OAASH,EAAUwF,MAClD,MAAM,IAAI3B,EACRf,EACAmC,EAAOjD,KAAOwF,EAAIvB,IAAIzD,IAAMyC,EAAOnD,MAAOmE,IAAIvD,2CACTuC,EAAOjD,KAAO,cAAgBiD,EAAOnD,MAAOkE,OAIrF,MAAMwF,EAASvG,EAAOnD,MAAOmE,IAAIvD,MAAME,OAIvC,GAFAqC,EAAO9C,OAEH8C,EAAOjD,KACT,MAAM,IAAI6B,EAAWf,EAAO0E,EAAIvB,IAAIvD,MAAO,qDAG7C,MAAOZ,KAAU2J,GAAYC,GAAUzG,EAAQnC,GAE/C,MAAO,CACL,CACE3C,KAAMJ,EAASW,SACf8G,IAAAA,EACA1F,MAAOA,EACPmE,IAAK,CACHvD,MAAOgB,EAAc8D,EAAIvB,IAAIvD,OAC7BF,IAAKkB,EAAc5B,EAAMmE,IAAIzD,MAE/BgJ,OAAAA,MAEEC,GA3OGE,CAAS1G,EAAQnC,IAU5B,SAAU4I,GAAUzG,EAAuBnC,GACzC,GAAImC,EAAOnD,MAAO3B,OAASH,EAAUoG,QAC/BnB,EAAOnD,MAAOkE,IAAI,KAAOrB,GAAgBM,EAAOnD,MAAOkE,IAAI,KAAOpB,QAmO1E,SAAgBK,GACd,MAAO,CACL9E,KAAMJ,EAASgK,OACf9D,IAAKhB,EAAOnD,MAAOmE,IACnBD,IAAKf,EAAOnD,MAAOkE,IACnBlE,MAAOoH,EAAYjE,EAAOnD,MAAOkE,MAvOzBD,CAAOd,GACJA,EAAOnD,MAAOkE,MAAQkE,IAAQjF,EAAOnD,MAAOkE,MAAQmE,SA0OnE,SAAiBlF,GACf,MAAO,CACL9E,KAAMJ,EAAS6L,QACf3F,IAAKhB,EAAOnD,MAAOmE,IACnBnE,MAAOmD,EAAOnD,MAAOkE,MAAQkE,IA7OrB2B,CAAQ5G,GACL0F,GAAavF,KAAKH,EAAOnD,MAAOkE,MAAQ4E,GAAaxF,KAAKH,EAAOnD,MAAOkE,WAgPvF,SAAkBf,EAAuBnC,GAmBvC,IAEIhB,EAFAmE,EAAMhB,EAAOnD,MAAOmE,IACpBD,EAAMf,EAAOnD,MAAOkE,IAKxB,IACGf,EAAOiB,OAAOlE,MACfiD,EAAOiB,OAAOpE,MAAO3B,OAASH,EAAUoG,SACxCuE,GAAavF,KAAKY,IAClB4E,GAAaxF,KAAKH,EAAOiB,OAAOpE,MAAOkE,KACvC,CACA,MAAMtD,EAAQuD,EAAIvD,MAElBuC,EAAO9C,OACP8D,EAAM,CAAEvD,MAAAA,EAAOF,IAAKyC,EAAOnD,MAAOmE,IAAIzD,KACtCwD,OAAWf,EAAOnD,MAAOkE,MAG3B,IAAKf,EAAOiB,OAAOlE,MAAQiD,EAAOiB,OAAOpE,MAAO3B,OAASH,EAAU0F,IAAK,CACtE,MAAMhD,EAAQuD,EAAIvD,MAIlB,GAFAuC,EAAO9C,OAEH8C,EAAOiB,OAAOlE,MAAQiD,EAAOiB,OAAOpE,MAAO3B,OAASH,EAAUoG,QAChE,MAAM,IAAIvC,EAAWf,EAAOmC,EAAOnD,MAAOmE,IAAIzD,IAAK,0CAErDyC,EAAO9C,OAEP8D,EAAM,CAAEvD,MAAAA,EAAOF,IAAKyC,EAAOnD,MAAOmE,IAAIzD,KACtCwD,OAAWf,EAAOnD,MAAOkE,MAG3B,GAAK2E,GAAavF,KAAKY,GAKrBlE,EAAQ,IAAIgK,KAAK9F,EAAI2D,QAAQ,IAAK,UALP,CAE3B,MAAOoC,IAAc,IAAID,MAAOE,cAAcC,MAAM,KACpDnK,EAAQ,IAAIgK,QAAQC,KAAc/F,KAKpC,MAAO,CACL7F,KAAMJ,EAASmM,SACfjG,IAAAA,EACAD,IAAAA,EACAlE,MAAAA,GA/SQqK,CAASlH,EAAQnC,IAErBmC,EAAOiB,OAAOlE,MAAQiD,EAAOiB,OAAOpE,MAAO3B,OAASH,EAAU0F,KAChE4E,GAAOlF,KAAKH,EAAOnD,MAAOkE,MAC1BuE,GAAOnF,KAAKH,EAAOnD,MAAOkE,MACzBoE,GAAMhF,KAAKH,EAAOnD,MAAOkE,OAASwE,GAAOpF,KAAKH,EAAOnD,MAAOkE,WA8SnE,SAAef,EAAuBnC,GACpC,IAEIhB,EAFAmE,EAAMhB,EAAOnD,MAAOmE,IACpBD,EAAMf,EAAOnD,MAAOkE,IAGxB,GAAIsE,GAAOlF,KAAKY,GACdlE,EAAgB,SAARkE,GAAkBoG,EAAAA,EAAWA,EAAAA,OAChC,GAAI7B,GAAOnF,KAAKY,GACrBlE,EAAyB,SACpB,GAAKmD,EAAOiB,OAAOlE,MAAQiD,EAAOiB,OAAOpE,MAAO3B,OAASH,EAAU0F,IAmBxE5D,EAAQuK,OAAOrG,EAAI2D,QAAQU,GAAY,SAnBsC,CAC7E,MAAM3H,EAAQuD,EAAIvD,MASlB,GAFAuC,EAAO9C,OAEH8C,EAAOiB,OAAOlE,MAAQiD,EAAOiB,OAAOpE,MAAO3B,OAASH,EAAUoG,QAChE,MAAM,IAAIvC,EAAWf,EAAOmC,EAAOnD,MAAOmE,IAAIzD,IAAK,qCAErDyC,EAAO9C,OAEP6D,OAAWf,EAAOnD,MAAOkE,MACzBC,EAAM,CAAEvD,MAAAA,EAAOF,IAAKyC,EAAOnD,MAAOmE,IAAIzD,KACtCV,EAAQuK,OAAOrG,EAAI2D,QAAQU,GAAY,KAKzC,MAAO,CAAElK,KAAMJ,EAASuM,MAAOrG,IAAAA,EAAKD,IAAAA,EAAKlE,MAAAA,GA3U/ByK,CAAMtH,EAAQnC,SA8U1B,SAAiBmC,GAEf,GAA0B,OAAtBA,EAAOnD,MAAOkE,KAAsC,OAAtBf,EAAOnD,MAAOkE,IAC9C,MAAO,CACL7F,KAAMJ,EAASyM,QACfvG,IAAKhB,EAAOnD,MAAOmE,IACnBD,IAAKf,EAAOnD,MAAOkE,IACnBlE,MAAO,GAIX,IAAI2K,EAAQ,GACRjC,GAAOpF,KAAKH,EAAOnD,MAAOkE,KAC5ByG,EAAQ,GACChC,GAASrF,KAAKH,EAAOnD,MAAOkE,KACrCyG,EAAQ,EACC/B,GAAUtF,KAAKH,EAAOnD,MAAOkE,OACtCyG,EAAQ,GAGV,MAAM3K,EAAQ+H,SACZ5E,EACGnD,MAAOkE,IAAI2D,QAAQU,GAAY,IAC/BV,QAAQc,GAAU,IAClBd,QAAQe,GAAW,IACtB+B,GAGF,MAAO,CACLtM,KAAMJ,EAASyM,QACfvG,IAAKhB,EAAOn
D,MAAOmE,IACnBD,IAAKf,EAAOnD,MAAOkE,IACnBlE,MAAAA,GA5WQ4K,CAAQzH,QAEX,GAAIA,EAAOnD,MAAO3B,OAASH,EAAUuF,YA8W9C,SAAqBN,EAAuBnC,GAC1C,GAA0B,MAAtBmC,EAAOnD,MAAOkE,IAChB,MAAM,IAAInC,EACRf,EACAmC,EAAOnD,MAAOmE,IAAIvD,8CACsBuC,EAAOnD,MAAOkE,OAK1D,MAAMlE,EAAqB,CACzB3B,KAAMJ,EAASiB,YACfiF,IAAKrC,EAAcqB,EAAOnD,MAAOmE,KACjCqF,MAAO,IAGTrG,EAAO9C,OAEP,MACG8C,EAAOjD,OACNiD,EAAOnD,MAAO3B,OAASH,EAAUuF,OAAyC,MAA/BN,EAAOnD,MAAgBkE,MACpE,CACA,GAAKf,EAAOnD,MAAgB3B,OAASH,EAAUyF,MAAO,CACpD,MAAMkH,EAAW7K,EAAMwJ,MAAMxJ,EAAMwJ,MAAM7H,OAAS,GAClD,IAAKkJ,EACH,MAAM,IAAI9I,EACRf,EACAmC,EAAOnD,MAAOmE,IAAIvD,MAClB,oDAIJiK,EAASC,OAAQ,EACjBD,EAAS1G,IAAIzD,IAAMyC,EAAOnD,MAAOmE,IAAIvD,MAErCuC,EAAO9C,OACP,SAGF,MAAO8I,GAAQF,GAAU9F,EAAQnC,GACjC,GAAImI,EAAK9K,OAASJ,EAASW,SACzB,MAAM,IAAImD,EACRf,EACAmC,EAAOnD,MAAOmE,IAAIvD,+DACuCuI,EAAK9K,QAIlE,MAAM0M,EAAoC,CACxC1M,KAAMJ,EAASe,WACfmF,IAAKrC,EAAcqH,EAAKhF,KACxBgF,KAAAA,EACA2B,OAAO,GAGT9K,EAAMwJ,MAAM9H,KAAKqJ,GACjB5H,EAAO9C,OAGT,GACE8C,EAAOjD,MACPiD,EAAOnD,MAAO3B,OAASH,EAAUuF,OACD,MAA/BN,EAAOnD,MAAgBkE,IAExB,MAAM,IAAInC,EACRf,EACAmC,EAAOjD,KAAOF,EAAMmE,IAAIvD,MAAQuC,EAAOnD,MAAOmE,IAAIvD,6BAC3BuC,EAAOjD,KAAO,cAAgBiD,EAAOnD,MAAOkE,OAMvE,OAFAlE,EAAMmE,IAAIzD,IAAMyC,EAAOnD,MAAOmE,IAAIzD,IAE3BV,EAtbCgL,CAAY7H,EAAQnC,OACrB,CAAA,GAAImC,EAAOnD,MAAO3B,OAASH,EAAUsF,QAM1C,MAAM,IAAIzB,EACRf,EACAmC,EAAOnD,MAAOmE,IAAIvD,kCACUuC,EAAOnD,MAAO3B,6CATO,CACnD,MAAO4M,EAActB,GAubzB,SAAqBxG,EAAuBnC,GAE1C,GAA0B,MAAtBmC,EAAOnD,MAAOkE,IAChB,MAAM,IAAInC,EACRf,EACAmC,EAAOnD,MAAOmE,IAAIvD,8CACsBuC,EAAOnD,MAAOkE,OAI1D,MAAMlE,EAAqB,CACzB3B,KAAMJ,EAASa,YACfqF,IAAKrC,EAAcqB,EAAOnD,MAAOmE,KACjCqF,MAAO,IAET,IAAIG,EAAsB,GAE1BxG,EAAO9C,OAEP,MACG8C,EAAOjD,OACNiD,EAAOnD,MAAO3B,OAASH,EAAUsF,SAA2C,MAA/BL,EAAOnD,MAAgBkE,MACtE,CACA,GAAKf,EAAOnD,MAAgB3B,OAASH,EAAUyF,MAAO,CACpD,MAAMkH,EAAW7K,EAAMwJ,MAAMxJ,EAAMwJ,MAAM7H,OAAS,GAClD,IAAKkJ,EACH,MAAM,IAAI9I,EACRf,EACAmC,EAAOnD,MAAOmE,IAAIvD,MAClB,qDAIJiK,EAASC,OAAQ,EACjBD,EAAS1G,IAAIzD,IAAMyC,EAAOnD,MAAOmE,IAAIvD,WAChC,GAAKuC,EAAOnD,MAAgB3B,OAASH,EAAUkB,QACpDuK,EAASjI,KAAKmC,GAAQV,QACjB,CACL,MAAOgG,KAAS+B,GAAuBtB,GAAUzG,EAAQnC,GACnD+J,EAA0B,CAC9B1M,KAAMJ,EAASe,WACfmF,IAAKrC,EAAcqH,EAAKhF,KACxBgF,KAAAA,EACA2B,OAAO,GAGT9K,EAAMwJ,MAAM9H,KAAKqJ,GACjBxE,EAAMoD,EAAUuB,GAGlB/H,EAAO9C,OAGT,GACE8C,EAAOjD,MACPiD,EAAOnD,MAAO3B,OAASH,EAAUsF,SACD,MAA/BL,EAAOnD,MAAgBkE,IAExB,MAAM,IAAInC,EACRf,EACAmC,EAAOjD,KAAOF,EAAMmE,IAAIvD,MAAQuC,EAAOnD,MAAOmE,IAAIvD,6BAC3BuC,EAAOjD,KAAO,cAAgBiD,EAAOnD,MAAOkE,OAMvE,OAFAlE,EAAMmE,IAAIzD,IAAMyC,EAAOnD,MAAOmE,IAAIzD,IAE3B,CAACV,EAAO2J,GA1foBwB,CAAYhI,EAAQnC,SAE/CiK,QACCtB,IAUX,SAAS9F,GAAQV,GAGf,MAAO,CACL9E,KAAMJ,EAASmB,QACf+E,IAAKhB,EAAOnD,MAAOmE,IACnBD,IAAKf,EAAOnD,MAAOkE,cC5DCkH,GAASC,EAAiBC,OHXpBtL,EGkB5B,SAASuL,EAAcC,EAAuBC,GAC5C,IAAK,MAAMrN,KAAQoN,EACjBE,EAAatN,EAAMqN,GAIvB,SAASC,EAAatN,EAAYqN,GAChC,MAAME,EAAQL,EAAQlN,EAAKC,MAS3B,OAPIsN,GAA0B,mBAAVA,GACjBA,EAAgBvN,EAAMqN,GAErBE,GAAUA,EAAoBC,OAC/BD,EAAoBC,MAAOxN,EAAMqN,GAG5BrN,EAAKC,MACX,KAAKJ,EAASK,SACZiN,EAAenN,EAAkBoL,MAAOpL,GACxC,MAEF,KAAKH,EAASO,MACZkN,EAActN,EAAesH,IAAKtH,GAClCmN,EAAenN,EAAeoL,MAAOpL,GACrC,MACF,KAAKH,EAASsB,SACZmM,EAActN,EAAkB+K,KAAM/K,GACtC,MAEF,KAAKH,EAASS,WACZgN,EAActN,EAAoBsH,IAAKtH,GACvCmN,EAAenN,EAAoBoL,MAAOpL,GAC1C,MACF,KAAKH,EAASwB,cACZiM,EAActN,EAAuB+K,KAAM/K,GAC3C,MAEF,KAAKH,EAASW,SACZ8M,EAActN,EAAkBsH,IAAKtH,GACrCsN,EAActN,EAAkB4B,MAAO5B,GACvC,MAEF,KAAKH,EAASa,YACZyM,EAAenN,EAAqBoL,MAAOpL,GAC3C,MACF,KAAKH,EAASe,WACZ0M,EAActN,EAAoB+K,KAAM/K,GACxC,MAEF,KAAKH,EAASiB,YACZqM,EAAenN,EAAqBoL,MAAOpL,GAC3C,MAEF,KAAKH,EAASmL,IACd,KAAKnL,EAASgK,OACd,KAAKhK,EAASyM,QACd,KAAKzM,EAASuM,MA
Cd,KAAKvM,EAAS6L,QACd,KAAK7L,EAASmM,SACd,KAAKnM,EAASmB,QACZ,MAEF,QACE,MAAM,IAAI4C,iCAAiC5D,EAAKC,SAGhDsN,GAAUA,EAAoBE,MAC/BF,EAAoBE,KAAMzN,EAAMqN,GHpFrB,OADYzL,EGYbqL,IHX2C,mBAA3BrL,EAAMM,OAAOT,UGY1C0L,EAAcF,EAAK,MAEnBK,EAAaL,EAAK,MCTtB,MAAMS,GAAwC,IAAIC,QAC5CC,GAAYC,IACXH,GAActG,IAAIyG,IACrBH,GAAcI,IAAID,EAAM,IAAIF,SAEvBD,GAAcK,IAAIF,IAGrBG,GAAuC,IAAIL,QAC3CM,GAAWJ,IACVG,GAAa5G,IAAIyG,IACpBG,GAAaF,IAAID,EAAM,IAAIF,SAEtBK,GAAaD,IAAIF,aAGVpE,GAAQoE,EAAYR,EAAca,EAAgBC,GAGhE,GAAIlN,EAASoM,GAAS,CACpB,MAAM1L,EAAQ0L,EAAOjC,MAAMgD,QAAQF,GACnC,GAAIvM,EAAQ,EAAG,MAAM,IAAIiC,MAAM,2DAE/ByJ,EAAOjC,MAAMiD,OAAO1M,EAAO,EAAGwM,QACzB,GAAIjN,EAAQmM,GACjBA,EAAOtC,KAAOoD,MACT,CAAA,IAAI5N,EAAW8M,GAOpB,MAAM,IAAIzJ,kCAAkCyJ,EAAOpN,qBAN/CoN,EAAO/F,MAAQ4G,EACjBb,EAAO/F,IAAM6G,EAEbd,EAAOzL,MAAQuM,EAWnBG,GAAUH,EAJI,CACZ9L,MAAO6L,EAASnI,IAAIvD,MAAMD,KAAO4L,EAAYpI,IAAIvD,MAAMD,KACvDE,QAASyL,EAASnI,IAAIvD,MAAME,OAASyL,EAAYpI,IAAIvD,MAAME,SAK7D,MAAM6L,EAAgBpM,EAAQ+L,EAASnI,KACjCyI,EAAmBrM,EAAQgM,EAAYpI,KAM7C0I,GALe,CACbpM,MAAOmM,EAAiBnM,MAAQkM,EAAclM,MAC9CI,QAAS+L,EAAiB/L,QAAU8L,EAAc9L,SAGlCwL,GAAQJ,GAAOM,EAAaD,YAGhCQ,GAAOb,EAAYR,EAAcsB,EAAahN,GAC5D,IAAKV,EAASoM,GACZ,MAAM,IAAIzJ,kCAAmCyJ,EAAgBpN,oBAK/D,IAAI2O,EACAC,EAHJlN,EAAiB,MAATA,EAAgBA,EAAQ0L,EAAOjC,MAAM7H,OAIzC9C,EAAc4M,IAAWxM,EAAcwM,KACtCuB,MAAAA,EAAOC,OAAAA,GAuFd,SACExB,EACAsB,EACAhN,GAEA,IAAKhB,EAAagO,GAChB,MAAM,IAAI/K,kCAAmC+K,EAAe1O,SAI9D,MAAMwM,EAAoB,MAAT9K,EAAgB0L,EAAOjC,MAAMzJ,EAAQ,GAAK8E,EAAK4G,EAAOjC,OACjE0D,EAAmB,MAATnN,GAAiBA,IAAU0L,EAAOjC,MAAM7H,OAExD8J,EAAOjC,MAAMiD,OAAO1M,EAAO,EAAGgN,GAG9B,MAAMI,IAAkBtC,EAClBuC,GAAkBF,EAClBG,EAAaH,IAA2B,IAAhBH,EAAMjC,MAChCqC,IACFtC,EAAUC,OAAQ,GAEhBsC,IACFL,EAAMjC,OAAQ,GAKhB,MAAMwC,EAAezO,EAAc4M,IAsRrC,SAAiBD,GACf,IAAKA,EAAMhC,MAAM7H,OAAQ,OAAO,EAGhC,OADapB,EAAQiL,EAAMrH,KACf1D,MAAQ+K,EAAMhC,MAAM7H,OA1Rc4L,CAAQ9B,GAIhD7K,EAAQiK,EACV,CACElK,KAAMkK,EAAS1G,IAAIzD,IAAIC,KACvBG,OAAQwM,EACHnO,EAAU0L,GAETY,EAAOtH,IAAIvD,MAAME,OADjB+J,EAAS1G,IAAIvD,MAAME,OAErB+J,EAAS1G,IAAIzD,IAAII,QAEvBc,EAAc6J,EAAOtH,IAAIvD,OAE7B,IAAI4M,EAAgB,EACpB,GAAIF,EACFE,EAAgB,MACX,CACL,MAAMC,EAAa,EACbC,EAAe,EACrB9M,EAAME,QAAUqM,EAAgBM,EAAaC,EAE/C9M,EAAMD,MAAQ6M,EAEd,MAAMR,EAAQ,CACZvM,MAAOG,EAAMD,KAAOoM,EAAM5I,IAAIvD,MAAMD,KACpCE,QAASD,EAAME,OAASiM,EAAM5I,IAAIvD,MAAME,QAIpC6M,EAAapN,EAAQwM,EAAM5I,KAC3B8I,EAAS,CACbxM,MAAOkN,EAAWlN,OAAS+M,EAAgB,GAC3C3M,QAAS8M,EAAW9M,SAAWsM,GAAiBC,EAAiB,EAAI,IAAMC,EAAa,EAAI,IAG9F,MAAO,CAAEL,MAAAA,EAAOC,OAAAA,GAxJOW,CAAanC,EAAQsB,EAAqBhN,MAE5DiN,MAAAA,EAAOC,OAAAA,GAoCd,SACExB,EACAsB,EACAhN,GAEA,GTwKsB3B,ESxKT2O,ITyKNpO,EAAWP,IAASG,EAAQH,IAASK,EAAaL,IAASe,EAAUf,ISxK1E,MAAM,IAAI4D,kCAAmC+K,EAAe1O,aTuKxCD,ESpKtB,MAAMyM,EAAWY,EAAOjC,MAAMzJ,EAAQ,GAChC8N,EAAiB1P,EAAWsN,KAAYA,EAAOjC,MAAM7H,OAE3D8J,EAAOjC,MAAMiD,OAAO1M,EAAO,EAAGgN,GAI9B,MAAMnM,EAAQiK,EACV,CACElK,KAAMkK,EAAS1G,IAAIzD,IAAIC,KACvBG,OAAS3B,EAAU0L,GAAwCY,EAAOtH,IAAIvD,MAAME,OAA7C+J,EAAS1G,IAAIvD,MAAME,QAEpDc,EAAc6J,EAAOtH,IAAIvD,OAEvBkN,EAAWvP,EAAQwO,IAAUtO,EAAasO,GAChD,IAAIS,EAAgB,EAChBK,IAGFL,EADSM,EACO,EAEA,GAElBlN,EAAMD,MAAQ6M,EAEd,MAAMR,EAAQ,CACZvM,MAAOG,EAAMD,KAAOoM,EAAM5I,IAAIvD,MAAMD,KACpCE,QAASD,EAAME,OAASiM,EAAM5I,IAAIvD,MAAME,QAIpC6M,EAAapN,EAAQwM,EAAM5I,KAC3B8I,EAAS,CACbxM,MAAOkN,EAAWlN,OAAS+M,EAAgB,GAC3C3M,QAAS8M,EAAW9M,SAGtB,MAAO,CAAEmM,MAAAA,EAAOC,OAAAA,GAlFOc,CACnBtC,EACAsB,EACAhN,IAIJ2M,GAAUK,EAAOC,GAKjB,MAAMnC,EAAWY,EAAOjC,MAAMzJ,EAAQ,GAChCiO,EAAkBnD,GAAYwB,GAAQJ,GAAME,IAAItB,GAClDmD,IACFf,EAAOxM,OAASuN,EAAgBvN,MAChCwM,EAAOpM,SAAWmN,EAAgBnN,QAS9B9B,EAAagO,IAAUlC,GAAYY,EAAOjC,MAAMzJ,EAAQ,KAC1DkN,EAAOpM,SAAW,GAGpBwL,GAAQJ,GAAMgC,OAAOpD,IAGPwB,GAAQJ,GAChB
C,IAAIa,EAAOE,YAwHLiB,GAAOjC,EAAYR,EAAcrN,GAc/C,IAAKiB,EAASoM,GACZ,MAAM,IAAIzJ,kCAAkCyJ,EAAOpN,oBAGrD,IAAI0B,EAAQ0L,EAAOjC,MAAMgD,QAAQpO,GACjC,GAAI2B,EAAQ,EAAG,CAIb,IAFAA,EAAQ0L,EAAOjC,MAAMpI,UAAU+H,GAAQ7J,EAAQ6J,IAASA,EAAKA,OAAS/K,IAE1D,EACV,MAAM,IAAI4D,MAAM,6CAGlB5D,EAAOqN,EAAOjC,MAAMzJ,GAGtB,MAAM8K,EAAWY,EAAOjC,MAAMzJ,EAAQ,GACtC,IAAIM,EAAOoL,EAAOjC,MAAMzJ,EAAQ,GAGhC0L,EAAOjC,MAAMiD,OAAO1M,EAAO,GAC3B,IAAIoO,EAAe5N,EAAQnC,EAAK+F,KAU5B9D,GAAQlB,EAAUkB,IAASA,EAAK8D,IAAIvD,MAAMD,OAASvC,EAAK+F,IAAIzD,IAAIC,OAElEwN,EAAe5N,EAAQ,CAAEK,MAAOxC,EAAK+F,IAAIvD,MAAOF,IAAKL,EAAK8D,IAAIzD,MAI9DL,EAAOoL,EAAOjC,MAAMzJ,EAAQ,GAG5B0L,EAAOjC,MAAMiD,OAAO1M,EAAO,IAI7B,MAAMqO,EAAYvD,GAAY9L,EAAa8L,GACrCwD,EAAwBxD,GAAYA,EAAS1G,IAAIzD,IAAIC,OAASvC,EAAK+F,IAAIvD,MAAMD,KAC7E2N,EAAmBjO,GAAQA,EAAK8D,IAAIvD,MAAMD,OAASvC,EAAK+F,IAAIzD,IAAIC,KAChE4N,EAAYH,IAAcC,GAAyBC,GAEnDrB,EAAS,CACbxM,QAAS0N,EAAa1N,OAAS8N,EAAY,EAAI,IAC/C1N,SAAUsN,EAAatN,SAIrBuN,GAAaC,IACfpB,EAAOpM,SAAW,GAEhBuN,GAAavD,IAAaxK,IAC3BwK,EAA+CC,OAAQ,GAI1D,MAAMtE,EAASqE,GAAYY,EACrB+C,EAAiB3D,EAAWwB,GAAQJ,GAAQD,GAASC,GACrDwC,EAAepC,GAAQJ,GACvB+B,EAAkBQ,EAAerC,IAAI3F,GACvCwH,IACFf,EAAOxM,OAASuN,EAAgBvN,MAChCwM,EAAOpM,SAAWmN,EAAgBnN,SAEpC,MAAM6N,EAAiBD,EAAatC,IAAI/N,GACpCsQ,IACFzB,EAAOxM,OAASiO,EAAejO,MAC/BwM,EAAOpM,SAAW6N,EAAe7N,SAGnC2N,EAAetC,IAAI1F,EAAQyG,YAGb0B,GACd1C,EACA7N,EACAwQ,GAA2B,GAG3B,IAAKA,EAAiB,OACtB,IAAKxQ,EAAKoL,MAAM7H,OAAQ,OAGxBkL,GAAU,CAAEpM,MAAO,EAAGI,QAAS,GAAKmL,GAASC,GAAO7N,GAGpD,MAAMyQ,EAAYhK,EAAKzG,EAAKoL,OAC5BqD,GAAU,CAAEpM,MAAO,EAAGI,QAAS,GAAKwL,GAAQJ,GAAO4C,YAGrCC,GACd7C,EACA7N,EACA2Q,GAA2B,GAG3B,IAAKA,EAAiB,OACtB,IAAK3Q,EAAKoL,MAAM7H,OAAQ,OAExB,MAAMkN,EAAYhK,EAAKzG,EAAKoL,OAC5BqF,EAAU/D,OAAQ,EAElB+B,GAAU,CAAEpM,MAAO,EAAGI,QAAS,GAAKwL,GAAQJ,GAAO4C,YAGrCG,GAAY/C,GAC1B,MAAML,EAAQI,GAASC,GACjBJ,EAAOQ,GAAQJ,GAEfgB,EAAkE,CACtExM,MAAO,EACPI,QAAS,IAGX,SAASoO,EAAW7Q,GAClBA,EAAK+F,IAAIvD,MAAMD,MAAQsM,EAAOxM,MAC9BrC,EAAK+F,IAAIvD,MAAME,QAAUmM,EAAOpM,QAAQzC,EAAK+F,IAAIvD,MAAMD,OAAS,EAEhE,MAAMuO,EAAWtD,EAAMO,IAAI/N,GACvB8Q,IACFjC,EAAOxM,OAASyO,EAASzO,MACzBwM,EAAOpM,QAAQzC,EAAK+F,IAAIvD,MAAMD,OAC3BsM,EAAOpM,QAAQzC,EAAK+F,IAAIvD,MAAMD,OAAS,GAAKuO,EAASrO,SAG5D,SAASsO,EAAS/Q,GAChBA,EAAK+F,IAAIzD,IAAIC,MAAQsM,EAAOxM,MAC5BrC,EAAK+F,IAAIzD,IAAII,QAAUmM,EAAOpM,QAAQzC,EAAK+F,IAAIzD,IAAIC,OAAS,EAE5D,MAAMyO,EAAUvD,EAAKM,IAAI/N,GACrBgR,IACFnC,EAAOxM,OAAS2O,EAAQ3O,MACxBwM,EAAOpM,QAAQzC,EAAK+F,IAAIzD,IAAIC,OACzBsM,EAAOpM,QAAQzC,EAAK+F,IAAIzD,IAAIC,OAAS,GAAKyO,EAAQvO,SAGzD,MAAMwO,EAAgB,CACpBzD,MAAOqD,EACPpD,KAAMsD,GAGR/D,GAASa,EAAM,CACbrM,CAAC3B,EAASK,UAAW+Q,EACrBzP,CAAC3B,EAASO,OAAQ6Q,EAClBzP,CAAC3B,EAASS,YAAa2Q,EACvBzP,CAAC3B,EAASiB,aAAcmQ,EACxBzP,CAAC3B,EAASa,aAAcuQ,EAExBzP,CAAC3B,EAASe,YAAaqQ,EACvBzP,CAAC3B,EAASsB,UAAW8P,EACrBzP,CAAC3B,EAASwB,eAAgB4P,EAE1BzP,CAAC3B,EAASW,UAAW,CACnBgB,MAAMxB,GACJ,MAAMkR,EAAalR,EAAK+F,IAAIvD,MAAMD,KAAOsM,EAAOxM,MAC1C8O,EAAa1D,EAAKM,IAAI/N,EAAKsH,KACjCtH,EAAKsL,SAAWuD,EAAOpM,QAAQyO,IAAe,IAAMC,EAAaA,EAAW1O,QAAU,GAEtFoO,EAAW7Q,IAEbyN,KAAMsD,GAGRvP,CAAC3B,EAASmL,KAAMiG,EAChBzP,CAAC3B,EAASgK,QAASoH,EACnBzP,CAAC3B,EAASyM,SAAU2E,EACpBzP,CAAC3B,EAASuM,OAAQ6E,EAClBzP,CAAC3B,EAAS6L,SAAUuF,EACpBzP,CAAC3B,EAASmM,UAAWiF,EACrBzP,CAAC3B,EAASmB,SAAUiQ,IAGtBvD,GAAcmC,OAAOhC,GACrBG,GAAa6B,OAAOhC,YAGNS,GACdtO,EACAoR,EACAC,EAAyC,IAEzC,MAAMC,gBAAEA,GAAkB,GAAUD,EAC9BH,EAAalR,EAAK+F,IAAIvD,MAAMD,MAC5BF,MAAEA,EAAKI,QAAEA,GAAY2O,EACrBG,EAAQvR,IACPsR,GAAmBtR,EAAK+F,IAAIvD,MAAMD,OAAS2O,IAC9ClR,EAAK+F,IAAIvD,MAAME,QAAUD,EACzBzC,EAAK+F,IAAIzD,IAAII,QAAUD,GAEzBzC,EAAK+F,IAAIvD,MAAMD,MAAQF,EACvBrC,EAAK+F,IAAIzD,IAAIC,MAAQF,GAwBvB,OArBA2K,GAAShN,EAAM,CACbwB,C
AAC3B,EAASO,OAAQmR,EAClB/P,CAAC3B,EAASsB,UAAWoQ,EACrB/P,CAAC3B,EAASS,YAAaiR,EACvB/P,CAAC3B,EAASwB,eAAgBkQ,EAC1B/P,CAAC3B,EAASW,UAAUR,GAClBuR,EAAKvR,GACLA,EAAKsL,QAAU7I,GAEjBjB,CAAC3B,EAASmL,KAAMuG,EAChB/P,CAAC3B,EAASgK,QAAS0H,EACnB/P,CAAC3B,EAASyM,SAAUiF,EACpB/P,CAAC3B,EAASuM,OAAQmF,EAClB/P,CAAC3B,EAAS6L,SAAU6F,EACpB/P,CAAC3B,EAASmM,UAAWuF,EACrB/P,CAAC3B,EAASa,aAAc6Q,EACxB/P,CAAC3B,EAASe,YAAa2Q,EACvB/P,CAAC3B,EAASiB,aAAcyQ,EACxB/P,CAAC3B,EAASmB,SAAUuQ,IAGfvR,EAUT,SAASyO,GAAUI,EAAc2C,EAAkBxR,EAAYyR,GAC7D,MAAM7B,EAAkB4B,EAAQzD,IAAI0D,GAAQzR,GACxC4P,IACFf,EAAOxM,OAASuN,EAAgBvN,MAChCwM,EAAOpM,SAAWmN,EAAgBnN,SAGpC+O,EAAQ1D,IAAI9N,EAAM6O,YCxeJ6C,KACd,MAAO,CACLzR,KAAMJ,EAASK,SACf6F,IAAK,CAAEvD,MP4DF,CAAED,KAAM,EAAGG,OAAQ,GO5DFJ,IP4DjB,CAAEC,KAAM,EAAGG,OAAQ,IO3DxB0I,MAAO,aAIKuG,GAAcrK,GAC5B,MAAMsK,WAUyBtK,GAC/B,MAAMxB,EAAM+L,GAAcvK,GAE1B,MAAO,CACLrH,KAAMJ,EAASsB,SACf4E,IAAK,CACHvD,MPsCG,CAAED,KAAM,EAAGG,OAAQ,GOrCtBJ,IAAK,CAAEC,KAAM,EAAGG,OAAQoD,EAAIvC,OAAS,IAEvCwH,KAAM,CACJ9K,KAAMJ,EAASmL,IACfjF,IAAK,CACHvD,MAAO,CAAED,KAAM,EAAGG,OAAQ,GAC1BJ,IAAK,CAAEC,KAAM,EAAGG,OAAQoD,EAAIvC,OAAS,IAEvC3B,MAAO0F,EACPxB,IAAAA,IA1BcgM,CAAiBxK,GAEnC,MAAO,CACLrH,KAAMJ,EAASO,MACf2F,IAAKrC,EAAckO,EAAU7L,KAC7BuB,IAAKsK,EACLxG,MAAO,aAyBK2G,GAAmBzK,GACjC,MAAM0K,WAU8B1K,GACpC,MAAMxB,EAAM+L,GAAcvK,GAE1B,MAAO,CACLrH,KAAMJ,EAASwB,cACf0E,IAAK,CACHvD,MPMG,CAAED,KAAM,EAAGG,OAAQ,GOLtBJ,IAAK,CAAEC,KAAM,EAAGG,OAAQoD,EAAIvC,OAAS,IAEvCwH,KAAM,CACJ9K,KAAMJ,EAASmL,IACfjF,IAAK,CACHvD,MAAO,CAAED,KAAM,EAAGG,OAAQ,GAC1BJ,IAAK,CAAEC,KAAM,EAAGG,OAAQoD,EAAIvC,OAAS,IAEvC3B,MAAO0F,EACPxB,IAAAA,IA1BoBmM,CAAsB3K,GAE9C,MAAO,CACLrH,KAAMJ,EAASS,WACfyF,IAAKrC,EAAcsO,EAAgBjM,KACnCuB,IAAK0K,EACL5G,MAAO,aAyBK8G,GAAiB5K,EAAe1F,GAC9C,MAAMuQ,WA4BoBvQ,GAC1B,MAAMkE,EAAM+L,GAAcjQ,GAE1B,MAAO,CACL3B,KAAMJ,EAASmL,IACfjF,IAAK,CAAEvD,MP3CF,CAAED,KAAM,EAAGG,OAAQ,GO2CFJ,IAAK,CAAEC,KAAM,EAAGG,OAAQoD,EAAIvC,SAClDuC,IAAAA,EACAlE,MAAAA,GAnCewQ,CAAY9K,IACvB5E,OAAEA,GAAWyP,EAASpM,IAAIzD,IAE1BgJ,EAAS5I,EAAS,EAQxB,OANA4L,GACE1M,EACA,CAAES,MAAO,EAAGI,QAASC,EAAS,EAAId,EAAMmE,IAAIvD,MAAME,QAClD,CAAE4O,iBAAiB,IAGd,CACLrR,KAAMJ,EAASW,SACfuF,IAAK,CACHvD,MAAOgB,EAAc2O,EAASpM,IAAIvD,OAClCF,IAAKkB,EAAc5B,EAAMmE,IAAIzD,MAE/BgF,IAAK6K,EACL7G,OAAAA,EACA1J,MAAAA,GAIJ,MAAMyQ,GAAc,iBACpB,SAASR,GAAcjQ,GACrB,OAAOA,EAAMmG,IAAIuK,GAASD,GAAYnN,KAAKoN,GAAQA,EAAOtK,KAAKC,UAAUqK,IAAQpK,KAAK,cA0ExEqK,GAAmBxH,GACjC,MAAO,CACL9K,KAAMJ,EAASe,WACfmF,IAAKrC,EAAcqH,EAAKhF,KACxBgF,KAAAA,EACA2B,OAAO,YClLK8F,GAAeC,GA0B7B,OAzB0BA,EAASrH,MAAMsH,OAAO3H,IAC9C,IAAKxK,EAAWwK,GAAO,OAAO,EAE9B,MAAM4H,EAAkB9R,EAAckK,EAAKnJ,OACrCgR,EACJnS,EAAcsK,EAAKnJ,QACnBmJ,EAAKnJ,MAAMwJ,MAAM7H,QACjB1C,EAAckK,EAAKnJ,MAAMwJ,MAAM,GAAGL,MAEpC,OAAO4H,GAAmBC,IAGVC,QAAQ7S,IACxB8P,GAAO2C,EAAUA,EAAUzS,GAEvBa,EAAcb,EAAK4B,OACrB8M,GAAO+D,EAAUA,EAYvB,SAAqBK,GACnB,MAAMzH,EAAQsG,GAAcmB,EAAUxL,IAAI1F,OAE1C,IAAK,MAAMmJ,KAAS+H,EAAUlR,MAAsBwJ,MAClDsD,GAAOrD,EAAOA,EAAON,EAAKA,MAI5B,OADA6F,GAAYvF,GACLA,EApBwB0H,CAAY/S,IAuB7C,SAA0B8S,GACxB,MAAMjF,EAAO6D,KAEb,IAAK,MAAMsB,KAAsBF,EAAUlR,MAAsBwJ,MAAO,CACtE,MAAM6H,EAAclB,GAAmBe,EAAUxL,IAAI1F,OACrD8M,GAAOb,EAAMA,EAAMoF,GAEnB,IAAK,MAAMC,KAAsBF,EAAkBjI,KAAqBK,MACtEsD,GAAOb,EAAMoF,EAAaC,EAAkBnI,MAKhD,OADA6F,GAAY/C,GACLA,EAAKzC,MAlCR+H,CAAiBnT,GAAM6S,QAAQI,IAC7BvE,GAAO+D,EAAUA,EAAUQ,OAKjCrC,GAAY6B,GACLA,WAmCOW,GAAiBX,GAC/B,IAAI7D,EAAQ,EACRnC,EAAW,EACf,IAAK,MAAM1B,KAAQ0H,EAASrH,MACT,IAAbqB,GAAkB1B,EAAKhF,IAAIvD,MAAMD,KAAO,EAE1CqM,EAAQ,EAAI7D,EAAKhF,IAAIvD,MAAMD,KAClBwI,EAAKhF,IAAIvD,MAAMD,KAAOqM,EAAQnC,EAAW,IAClDmC,GAASnC,EAAW,GAAK1B,EAAKhF,IAAIvD,MAAMD,KAAOqM,IAGjDN,GAAUvD,EAAM,CACd1I,MAAOuM,EACPnM,QAAS,IAEXgK,EAA
W1B,EAAKhF,IAAIzD,IAAIC,KAG1B,OAAOkQ,ECpFT,MAAMY,GAAiB,CACrBC,WAAY,GACZC,eAAe,EACfC,gBAAgB,YAGMC,GAAQ7R,EAAY8R,EAAiB,IAC3DA,EAAS9M,OAAO+M,OAAO,GAAIN,GAAgBK,GAC3C9R,EAAQgS,GAAOhS,GAEf,MAAM6Q,EAAWf,KACjB,IAAK,MAAM3G,KAAQ8I,GAAWjS,EAAO8R,GACnChF,GAAO+D,EAAUA,EAAU1H,GAc7B,OAZA6F,GAAY6B,GAKMjL,EAChBiL,EACAD,GACAC,aDuC6BA,EAAoBiB,GAEnD,OAAOjB,GCzCOqB,CAAiBrB,GAC7BW,IAMJ,SAAUS,GAAWxM,EAAaqM,GAChC,IAAK,MAAMpM,KAAOV,OAAOiB,KAAKR,SACtB6K,GAAiB,CAAC5K,GAAMkE,GAAUnE,EAAOC,GAAMoM,IAIzD,SAASlI,GAAU5J,EAAY8R,GAC7B,GAAa,MAAT9R,EACF,MAAM,IAAIgC,MAAM,mDAGlB,gBP/CuBhC,GACvB,MAAwB,iBAAVA,EO8CVmS,CAASnS,YF8EgBA,GAC7B,MAAMkE,EAAMkC,KAAKC,UAAUrG,GAE3B,MAAO,CACL3B,KAAMJ,EAASgK,OACf9D,IAAK,CAAEvD,MPtDF,CAAED,KAAM,EAAGG,OAAQ,GOsDFJ,IAAK,CAAEC,KAAM,EAAGG,OAAQoD,EAAIvC,SAClDuC,IAAAA,EACAlE,MAAAA,GEpFOoS,CAAepS,GACbkF,EAAUlF,YFuFSA,GAC9B,MAAMkE,EAAMlE,EAAMqF,WAElB,MAAO,CACLhH,KAAMJ,EAASyM,QACfvG,IAAK,CAAEvD,MPjEF,CAAED,KAAM,EAAGG,OAAQ,GOiEFJ,IAAK,CAAEC,KAAM,EAAGG,OAAQoD,EAAIvC,SAClDuC,IAAAA,EACAlE,MAAAA,GE7FOqS,CAAgBrS,YP1CHA,GACtB,MAAwB,iBAAVA,IAAuBkF,EAAUlF,GO0CpCsS,CAAQtS,YFgGSA,GAC5B,MAAMkE,EAAMlE,EAAMqF,WAElB,MAAO,CACLhH,KAAMJ,EAASuM,MACfrG,IAAK,CAAEvD,MP5EF,CAAED,KAAM,EAAGG,OAAQ,GO4EFJ,IAAK,CAAEC,KAAM,EAAGG,OAAQoD,EAAIvC,SAClDuC,IAAAA,EACAlE,MAAAA,GEtGOuS,CAAcvS,YPxCCA,GACxB,MAAwB,kBAAVA,EOwCHwS,CAAUxS,YFyGSA,GAC9B,MAAO,CACL3B,KAAMJ,EAAS6L,QACf3F,IAAK,CAAEvD,MPrFF,CAAED,KAAM,EAAGG,OAAQ,GOqFFJ,IAAK,CAAEC,KAAM,EAAGG,OAAQd,EAAQ,EAAI,IAC1DA,MAAAA,GE5GOyS,CAAgBzS,GACdmF,EAAOnF,YF+GaA,GAC/B,MAAMkE,EAAMlE,EAAMkK,cAElB,MAAO,CACL7L,KAAMJ,EAASmM,SACfjG,IAAK,CAAEvD,MP/FF,CAAED,KAAM,EAAGG,OAAQ,GO+FFJ,IAAK,CAAEC,KAAM,EAAGG,OAAQoD,EAAIvC,SAClDuC,IAAAA,EACAlE,MAAAA,GErHO0S,CAAiB1S,GACfiB,MAAMC,QAAQlB,GAO3B,SAAyBA,EAAmB8R,GAC1C,MAAM7G,EFiHC,CACL5M,KAAMJ,EAASa,YACfqF,IAAK,CAAEvD,MPxGF,CAAED,KAAM,EAAGG,OAAQ,GOwGFJ,IAAK,CAAEC,KAAM,EAAGG,OAAQ,IAC9C0I,MAAO,IEnHT,IAAK,MAAMmJ,KAAW3S,EAAO,CAC3B,MAAMmJ,EAAOS,GAAU+I,EAASb,GAC1BV,EAAoBT,GAAmBxH,GAE7C2D,GAAO7B,EAAcA,EAAcmG,GAMrC,OAJAzC,GAAoB1D,EAAcA,EAAc6G,EAAOF,gBACvD9C,GAAmB7D,EAAcA,EAAc6G,EAAOH,eACtD3C,GAAY/D,GAELA,EAlBE2H,CAAgB5S,EAAO8R,GAqBlC,SAAyB9R,EAAe8R,GAEtC,IAAKvM,EADLvF,EAAQgS,GAAOhS,IACO,OAAO4J,GAAU5J,EAAO8R,GAE9C,MAAMe,EFgHC,CACLxU,KAAMJ,EAASiB,YACfiF,IAAK,CAAEvD,MPzHF,CAAED,KAAM,EAAGG,OAAQ,GOyHFJ,IAAK,CAAEC,KAAM,EAAGG,OAAQ,IAC9C0I,MAAO,IElHHA,EAAQ,IAAIyI,GAAWjS,EAAO8R,IACpC,IAAK,MAAM3I,KAAQK,EAAO,CACxB,MAAM8H,EAAoBX,GAAmBxH,GAE7C2D,GAAO+F,EAAcA,EAAcvB,GAMrC,OAJA3C,GAAoBkE,EAAcA,EAAcf,EAAOF,gBACvD9C,GAAmB+D,EAAcA,EAAcf,EAAOH,eACtD3C,GAAY6D,GAELA,EAlCEC,CAAgB9S,EAAO8R,GAqClC,SAASE,GAAOhS,GACd,OAAOA,IAAUmF,EAAOnF,IAAkC,mBAAjBA,EAAMgS,OAAwBhS,EAAMgS,SAAWhS,ECvG1F,MAAMsB,GAAc,sBAEIyR,GAAO1H,EAAU2H,EAAkB,MACzD,MAAMvS,EAAkB,GAqExB,OAnEA2K,GAASC,EAAK,CACZzL,CAAC3B,EAASsB,UAAUnB,GAClB,MAAMwC,MAAEA,EAAKF,IAAEA,GAAQtC,EAAK+F,IAE5B8O,GAAMxS,EAAO,CAAEG,MAAAA,EAAOF,IAAK,CAAEC,KAAMC,EAAMD,KAAMG,OAAQF,EAAME,OAAS,IAAO,KAC7EmS,GAAMxS,EAAO,CAAEG,MAAO,CAAED,KAAMD,EAAIC,KAAMG,OAAQJ,EAAII,OAAS,GAAKJ,IAAAA,GAAO,MAE3Ed,CAAC3B,EAASwB,eAAerB,GACvB,MAAMwC,MAAEA,EAAKF,IAAEA,GAAQtC,EAAK+F,IAE5B8O,GAAMxS,EAAO,CAAEG,MAAAA,EAAOF,IAAK,CAAEC,KAAMC,EAAMD,KAAMG,OAAQF,EAAME,OAAS,IAAO,MAC7EmS,GAAMxS,EAAO,CAAEG,MAAO,CAAED,KAAMD,EAAIC,KAAMG,OAAQJ,EAAII,OAAS,GAAKJ,IAAAA,GAAO,OAG3Ed,CAAC3B,EAASW,UAAUR,GAClB,MACEwC,OAAOD,KAAEA,IACPvC,EAAK+F,IACT8O,GACExS,EACA,CAAEG,MAAO,CAAED,KAAAA,EAAMG,OAAQ1C,EAAKsL,QAAUhJ,IAAK,CAAEC,KAAAA,EAAMG,OAAQ1C,EAAKsL,OAAS,IAC3E,MAGJ9J,CAAC3B,EAASmL,KAAKhL,GACb6U,GAAMxS,EAAOrC,EAAK+F,IAAK/F,EAAK8F,MAG9BtE,CAAC3B,EAASgK,QAAQ7J,GAChB6U,GAAMxS,EAAOrC,EAAK+F,IAAK/F,EAAK8F,MAE9BtE,CAAC3B,EAASyM,S
AAStM,GACjB6U,GAAMxS,EAAOrC,EAAK+F,IAAK/F,EAAK8F,MAE9BtE,CAAC3B,EAASuM,OAAOpM,GACf6U,GAAMxS,EAAOrC,EAAK+F,IAAK/F,EAAK8F,MAE9BtE,CAAC3B,EAAS6L,SAAS1L,GACjB6U,GAAMxS,EAAOrC,EAAK+F,IAAK/F,EAAK4B,MAAMqF,aAEpCzF,CAAC3B,EAASmM,UAAUhM,GAClB6U,GAAMxS,EAAOrC,EAAK+F,IAAK/F,EAAK8F,MAG9BtE,CAAC3B,EAASa,aAAaV,GACrB,MAAMwC,MAAEA,EAAKF,IAAEA,GAAQtC,EAAK+F,IAC5B8O,GAAMxS,EAAO,CAAEG,MAAAA,EAAOF,IAAK,CAAEC,KAAMC,EAAMD,KAAMG,OAAQF,EAAME,OAAS,IAAO,KAC7EmS,GAAMxS,EAAO,CAAEG,MAAO,CAAED,KAAMD,EAAIC,KAAMG,OAAQJ,EAAII,OAAS,GAAKJ,IAAAA,GAAO,MAG3Ed,CAAC3B,EAASiB,aAAad,GACrB,MAAMwC,MAAEA,EAAKF,IAAEA,GAAQtC,EAAK+F,IAC5B8O,GAAMxS,EAAO,CAAEG,MAAAA,EAAOF,IAAK,CAAEC,KAAMC,EAAMD,KAAMG,OAAQF,EAAME,OAAS,IAAO,KAC7EmS,GAAMxS,EAAO,CAAEG,MAAO,CAAED,KAAMD,EAAIC,KAAMG,OAAQJ,EAAII,OAAS,GAAKJ,IAAAA,GAAO,MAE3Ed,CAAC3B,EAASe,YAAYZ,GACpB,IAAKA,EAAK0M,MAAO,OAEjB,MAAMlK,EAAQxC,EAAK+F,IAAIzD,IACvBuS,GAAMxS,EAAO,CAAEG,MAAAA,EAAOF,IAAK,CAAEC,KAAMC,EAAMD,KAAMG,OAAQF,EAAME,OAAS,IAAO,MAG/ElB,CAAC3B,EAASmB,SAAShB,GACjB6U,GAAMxS,EAAOrC,EAAK+F,IAAK/F,EAAK8F,QAIzBzD,EAAM6F,KAAK0M,GAAWA,EAG/B,SAASC,GAAMxS,EAAiB0D,EAAeD,GAC7C,MAAMgP,EAAYhP,EAAIiG,MAAM7I,IACtB6R,EAAiBhP,EAAIzD,IAAIC,KAAOwD,EAAIvD,MAAMD,KAAO,EAEvD,GAAIuS,EAAUvR,SAAWwR,EACvB,MAAM,IAAInR,4DAC8CmR,gBAA6BjP,MAIvF,IAAK,IAAIyC,EAAIxC,EAAIvD,MAAMD,KAAMgG,GAAKxC,EAAIzD,IAAIC,KAAMgG,IAAK,CACnD,MAAMhG,EAAOyB,GAAQ3B,EAAOkG,GACtByM,EAAgBzM,IAAMxC,EAAIvD,MAAMD,KAChC0S,EAAc1M,IAAMxC,EAAIzD,IAAIC,KAE5B2I,EAAS8J,EACXzS,EAAKwB,OAAO,EAAGgC,EAAIvD,MAAME,QAAQwS,OAAOnP,EAAIvD,MAAME,OAAQiC,GAC1D,GACEwG,EAAQ8J,EAAc1S,EAAKwB,OAAOgC,EAAIzD,IAAII,QAAU,GAE1DL,EAAMkG,EAAI,GAAK2C,EAAS4J,EAAUvM,EAAIxC,EAAIvD,MAAMD,MAAQ4I,GAI5D,SAASnH,GAAQ3B,EAAiBV,GAChC,IAAKU,EAAMV,EAAQ,GACjB,IAAK,IAAI4G,EAAI,EAAGA,EAAI5G,EAAO4G,IACpBlG,EAAMkG,KAAIlG,EAAMkG,GAAK,IAI9B,OAAOlG,EAAMV,EAAQ,YC1GCwT,GAAKlI,EAAUrK,EAAgB,IACrD,MAAMZ,EAAS2E,IACTyO,EAAsB,IAAIC,IAC1BC,EAA4B,IAAID,IAChCE,EAAuB,IAAIF,IACjC,IACIG,EADAC,EAAczT,EAEd0T,GAAO,EA0EX,OAxEA1I,GAASC,EAAK,CACZzL,CAAC3B,EAASO,OAAOJ,GACf,MAAMsH,EAAMtH,EAAKsH,IAAIyD,KAAKnJ,MAC1B,IACE+T,GAAY3T,EAAQsF,EAAKtH,EAAKC,KAAM,CAAEmV,OAAAA,EAAQE,aAAAA,EAAcC,QAAAA,IAC5D,MAAOK,GACP,MAAM,IAAIjS,EAAWf,EAAO5C,EAAKsH,IAAIvB,IAAIvD,MAAOoT,EAAI/R,SAGtD,MAAMgS,EAAaC,GAAQxO,GAC3B8N,EAAOW,IAAIF,GACXN,EAAQQ,IAAIF,GAEZJ,EAASO,GAAYhU,EAAQsF,IAG/B9F,CAAC3B,EAASS,YAAYN,GACpB,MAAMsH,EAAMtH,EAAKsH,IAAIyD,KAAKnJ,MAE1B,IACE+T,GAAY3T,EAAQsF,EAAKtH,EAAKC,KAAM,CAAEmV,OAAAA,EAAQE,aAAAA,EAAcC,QAAAA,IAC5D,MAAOK,GACP,MAAM,IAAIjS,EAAWf,EAAO5C,EAAKsH,IAAIvB,IAAIvD,MAAOoT,EAAI/R,SAGtD,MAAMgS,EAAaC,GAAQxO,GAC3BgO,EAAaS,IAAIF,GACjBN,EAAQQ,IAAIF,GAEZJ,EA6HN,SAA0BpO,EAAaC,GACrC,MAAMc,EAAS6N,GAAO5O,EAAQC,EAAI4O,MAAM,GAAI,IACtCC,EAAW1P,EAAKa,GACjBc,EAAO+N,KACV/N,EAAO+N,GAAY,IAGrB,MAAMlU,EAAO0E,IAGb,OAFAyB,EAAO3B,EAAKa,IAAOhE,KAAKrB,GAEjBA,EAvIMmU,CAAiBpU,EAAQsF,IAGpC9F,CAAC3B,EAASW,UAAW,CACnBgB,MAAMxB,GACJ,GAAI0V,EAAM,OAEV,MAAMpO,EAAMtH,EAAKsH,IAAI1F,MACrB,IACE+T,GAAYF,EAAQnO,EAAKtH,EAAKC,KAAM,CAAEmV,OAAAA,EAAQE,aAAAA,EAAcC,QAAAA,IAC5D,MAAOK,GACP,MAAM,IAAIjS,EAAWf,EAAO5C,EAAKsH,IAAIvB,IAAIvD,MAAOoT,EAAI/R,SAGtD,MAAMjC,WAgCEyU,EAAQrW,GACtB,OAAQA,EAAKC,MACX,KAAKJ,EAASiB,YACZ,MAAMkB,EAAS2E,IAUf,OARA3G,EAAKoL,MAAMyH,QAAQ,EAAG9H,KAAAA,MACpB,MAAMzD,EAAMyD,EAAKzD,IAAI1F,MACfA,EAAQyU,EAAQtL,EAAKnJ,OAErBwG,EAASd,EAAI/D,OAAS,EAAIyS,GAAYhU,EAAQsF,EAAI4O,MAAM,GAAI,IAAMlU,EACxEoG,EAAO3B,EAAKa,IAAS1F,IAGhBI,EAET,KAAKnC,EAASa,YACZ,OAAOV,EAAKoL,MAAMrD,IAAIgD,GAAQsL,EAAQtL,EAAKA,OAE7C,KAAKlL,EAASgK,OACd,KAAKhK,EAASyM,QACd,KAAKzM,EAASuM,MACd,KAAKvM,EAAS6L,QACd,KAAK7L,EAASmM,SACZ,OAAOhM,EAAK4B,MAEd,QACE,MAAM,IAAIgC,kCAAmC5D,EAAcC,UA1D3CoW,CAAQrW,EAAK4B,QA
CZ0F,EAAI/D,OAAS,EAAIyS,GAAYP,EAAQnO,EAAI4O,MAAM,GAAI,IAAMT,GAEjEhP,EAAKa,IAAS1F,EACrB2T,EAAQQ,IAAID,GAAQxO,IAEhBzG,EAAcb,EAAK4B,SACrB4T,EAAkBC,EAClBA,EAAS7T,IAGbJ,KAAKxB,GACCa,EAAcb,EAAK4B,SACrB6T,EAASD,KAKfhU,CAAC3B,EAASiB,aAAc,CACtBU,QAEEkU,GAAO,GAETlU,OACEkU,GAAO,MAKN1T,EAiCT,SAAS2T,GACPtO,EACAC,EACArH,EACAqW,GAGA,IAAIC,EAAkB,GAClB5U,EAAQ,EACZ,IAAK,MAAM2Q,KAAQhL,EAAK,CAGtB,GAFAiP,EAAMjT,KAAKgP,IAENlL,EAAIC,EAAQiL,GAAO,OACxB,GA2DsB,iBADL1Q,EA1DDyF,EAAOiL,MA2DYvL,EAAOnF,GA1DxC,MAAM,IAAIgC,2DAA2D2S,EAAMrO,KAAK,QAGlF,MAAMsO,EAAeV,GAAQS,GAC7B,GAAI1T,MAAMC,QAAQuE,EAAOiL,MAAWgE,EAAMhB,aAAalO,IAAIoP,GACzD,MAAM,IAAI5S,sDAAsD4S,KAGlE,MAAMC,EAAe9U,IAAU2F,EAAI/D,OAAS,EAC5C8D,EAASxE,MAAMC,QAAQuE,EAAOiL,KAAUmE,EAAehQ,EAAKY,EAAOiL,IAASjL,EAAOiL,GAgDvF,IAAqB1Q,EA7CnB,MAAMiU,EAAaC,GAAQxO,GAG3B,GAAID,GAAUpH,IAASJ,EAASO,OAASkW,EAAMf,QAAQnO,IAAIyO,GACzD,MAAM,IAAIjS,6DAA6DiS,KAIzE,GAAIxO,GAAUpH,IAASJ,EAASS,aAAegW,EAAMhB,aAAalO,IAAIyO,GACpE,MAAM,IAAIjS,kEAAkEiS,KAIhF,SAASG,GAAY3O,EAAaC,GAChC,MAAMc,EAAS6N,GAAO5O,EAAQC,EAAI4O,MAAM,GAAI,IACtCC,EAAW1P,EAAKa,GAKtB,OAJKc,EAAO+N,KACV/N,EAAO+N,GAAYxP,KAGdyB,EAAO+N,GAgBhB,SAASF,GAAO5O,EAAaQ,GAC3B,OAAOA,EAAKH,OAAO,CAAC+N,EAAQiB,KACrBjB,EAAOiB,KACVjB,EAAOiB,GAAU/P,KAEZ9D,MAAMC,QAAQ2S,EAAOiB,IAAWjQ,EAAKgP,EAAOiB,IAAWjB,EAAOiB,IACpErP,GAOL,SAASyO,GAAQxO,GACf,OAAOA,EAAIY,KAAK,KChMlB,IAAYyO,YAsDYC,GAAK1L,EAAaC,EAAY0L,EAAa,IACjE,OAAI3L,IAAWC,IVNkB2L,EUMU3L,EVLpCpE,EADkBgQ,EUMU7L,IVLfnE,EAAO+P,IAAMC,EAAEjL,gBAAkBgL,EAAEhL,eUM9C,GAGLjJ,MAAMC,QAAQoI,IAAWrI,MAAMC,QAAQqI,GAmE7C,SAAuBD,EAAeC,EAAc0L,EAAa,IAC/D,IAAIG,EAAoB,GAGxB,MAAMC,EAAgB/L,EAAOnD,IAAIH,GAC3BsP,EAAe/L,EAAMpD,IAAIH,GAG/BsP,EAAarE,QAAQ,CAACjR,EAAOD,KAC3B,MAAMwV,EAAWxV,GAASsV,EAAc1T,OAGxC,IAAK4T,GAAYF,EAActV,KAAWC,EACxC,OAIF,MAAM6P,EAAOwF,EAAc7I,QAAQxM,EAAOD,EAAQ,GAClD,IAAKwV,GAAY1F,GAAQ,EAAG,CAC1BuF,EAAQ1T,KAAK,CACXrD,KAAM0W,GAAWS,KACjBP,KAAAA,EACApF,KAAAA,EACA4F,GAAI1V,IAGN,MAAM4P,EAAO0F,EAAc5I,OAAOoD,EAAM,GAGxC,YAFAwF,EAAc5I,OAAO1M,EAAO,KAAM4P,GAMpC,MAAM+F,GAAWJ,EAAaK,SAASN,EAActV,IACrD,IAAKwV,GAAYG,EAIf,OAHAnP,EAAM6O,EAASJ,GAAK1L,EAAOvJ,GAAQwJ,EAAMxJ,GAAQkV,EAAKW,OAAO7V,UAC7DsV,EAActV,GAASC,GAMzBoV,EAAQ1T,KAAK,CACXrD,KAAM0W,GAAWc,IACjBZ,KAAMA,EAAKW,OAAO7V,KAEpBsV,EAAc5I,OAAO1M,EAAO,EAAGC,KAIjC,IAAK,IAAI2G,EAAI2O,EAAa3T,OAAQgF,EAAI0O,EAAc1T,OAAQgF,IAC1DyO,EAAQ1T,KAAK,CACXrD,KAAM0W,GAAWe,OACjBb,KAAMA,EAAKW,OAAOjP,KAItB,OAAOyO,EA3HEW,CAAczM,EAAQC,EAAO0L,GAC3B1P,EAAS+D,IAAW/D,EAASgE,GAY1C,SAAwBD,EAAaC,EAAY0L,EAAa,IAC5D,IAAIG,EAAoB,GAGxB,MAAMY,EAAchR,OAAOiB,KAAKqD,GAC1B+L,EAAgBW,EAAY7P,IAAIT,GAAOM,EAAgBsD,EAAO5D,KAC9DuQ,EAAajR,OAAOiB,KAAKsD,GACzB+L,EAAeW,EAAW9P,IAAIT,GAAOM,EAAgBuD,EAAM7D,KAI3DwQ,EAAW,CAACC,EAAgBC,KAChC,MAAMrW,EAAQqW,EAAO5J,QAAQ2J,GAC7B,GAAIpW,EAAQ,EAAG,OAAO,EAEtB,MAAMsW,EAAaL,EAAYX,EAAc7I,QAAQ2J,IACrD,OAAQF,EAAWN,SAASU,IAkC9B,OA9BAL,EAAY/E,QAAQ,CAACvL,EAAK3F,KACxB,MAAMuW,EAAWrB,EAAKW,OAAOlQ,GAC7B,GAAIuQ,EAAWN,SAASjQ,GACtBa,EAAM6O,EAASJ,GAAK1L,EAAO5D,GAAM6D,EAAM7D,GAAM4Q,SACxC,GAAIJ,EAASb,EAActV,GAAQuV,GAAe,CACvD,MAAMG,EAAKQ,EAAWX,EAAa9I,QAAQ6I,EAActV,KACzDqV,EAAQ1T,KAAK,CACXrD,KAAM0W,GAAWwB,OACjBtB,KAAAA,EACApF,KAAMnK,EACN+P,GAAAA,SAGFL,EAAQ1T,KAAK,CACXrD,KAAM0W,GAAWe,OACjBb,KAAMqB,MAMZL,EAAWhF,QAAQ,CAACvL,EAAK3F,KAClBiW,EAAYL,SAASjQ,IAASwQ,EAASZ,EAAavV,GAAQsV,IAC/DD,EAAQ1T,KAAK,CACXrD,KAAM0W,GAAWc,IACjBZ,KAAMA,EAAKW,OAAOlQ,OAKjB0P,EA7DEoB,CAAelN,EAAQC,EAAO0L,GAE9B,CACL,CACE5W,KAAM0W,GAAW0B,KACjBxB,KAAAA,QVlBmBE,EAAQD,WW/CXwB,GAAWtY,EAAY6W,GAC7C,IAAKA,EAAKtT,OAAQ,OAAOvD,EAEzB,GAAIO,EAAWP,GACb,OAAOsY,GAAWtY,EAAK4B,MAAOiV,GAGhC,MAAM1T,EAAqC,GAC3C,IAAIoV,EAqCJ,GApCItX,EAASjB,IACXA,EAAKoL,MAAMoN,KAAK,CAA
CzN,EAAMpJ,KACrB,IACE,IAAI2F,EAAY,GAChB,GAAI/G,EAAWwK,GACbzD,EAAMyD,EAAKzD,IAAI1F,WACV,GAAIzB,EAAQ4K,GACjBzD,EAAMyD,EAAKzD,IAAIyD,KAAKnJ,WACf,GAAIvB,EAAa0K,GAAO,CAG7B,MAAM0N,EAAa7Q,EAFnBN,EAAMyD,EAAKzD,IAAIyD,KAAKnJ,OAGfuB,EAAQsV,KACXtV,EAAQsV,GAAc,GAExB,MAAMC,EAAcvV,EAAQsV,KAE5BnR,EAAMA,EAAIkQ,OAAOkB,QACR/X,EAAaoK,IAASxK,EAAWwK,EAAKA,MAC/CzD,EAAMyD,EAAKA,KAAKzD,IAAI1F,MACXjB,EAAaoK,KACtBzD,EAAM,CAAC3F,IAGT,SAAI2F,EAAI/D,kBXImBwT,EAAYD,GAC7C,GAAIC,EAAExT,SAAWuT,EAAEvT,OAAQ,OAAO,EAElC,IAAK,IAAIgF,EAAI,EAAGA,EAAIwO,EAAExT,OAAQgF,IAC5B,GAAIwO,EAAExO,KAAOuO,EAAEvO,GAAI,OAAO,EAG5B,OAAO,EWXiBoQ,CAAYrR,EAAKuP,EAAKX,MAAM,EAAG5O,EAAI/D,YACnDgV,EAAQD,GAAWvN,EAAM8L,EAAKX,MAAM5O,EAAI/D,UACjC,GAIT,MAAOqS,GACP,OAAO,MAKR2C,EACH,MAAM,IAAI3U,qCAAqCiT,EAAK3O,KAAK,QAG3D,OAAOqQ,WAGOK,GAAc5Y,EAAY6W,GACxC,IACE,OAAOyB,GAAWtY,EAAM6W,GACxB,MAAOjB,cAGKiD,GAAW7Y,EAAY6W,GACrC,IACIxJ,EADAyL,EAAcjC,EAElB,KAAOiC,EAAYvV,SAAW8J,GAE5BA,EAASuL,GAAc5Y,EADvB8Y,EAAcA,EAAY5C,MAAM,GAAI,IAItC,IAAK7I,EACH,MAAM,IAAIzJ,6CAA6CiT,EAAK3O,KAAK,QAGnE,OAAOmF,GDxET,SAAYsJ,GACVA,YACAA,cACAA,kBACAA,cACAA,kBALF,CAAYA,KAAAA,yBEGU/U,GACpB,OAAOuT,GAAKxK,GAAU/I,GAAQA,qBCkBFsM,EAAkB6K,EAAcrF,GAC5D,MACMtI,EAAQ,IADOT,GAAUuD,IAGzB8K,EAAc7D,GAAK/J,GAYzB,OAAOuJ,GAGT,SAAsBsE,EAAoBF,EAAmB/B,GAoG3D,OAzFAA,EAAQnE,QAAQqG,IACd,YHzCkBA,GACpB,OAAOA,EAAOjZ,OAAS0W,GAAWc,IGwC5B0B,CAAMD,GAAS,CACjB,MAAMvK,EAAQ2J,GAAWS,EAASG,EAAOrC,MACnCiC,EAAcI,EAAOrC,KAAKX,MAAM,GAAI,GAC1C,IAUI7I,EAVA1L,EAAQ8E,EAAKyS,EAAOrC,MAEpBuC,EAAiB/Y,EAAasO,GAClC,GAAI7H,EAAUnF,KAAWmX,EAAYN,KAAK1R,GAAY,CACpD,MAAMuS,EAAUT,GAAcK,EAAUH,EAAYtB,OAAO,IACvD6B,GAAWhZ,EAAagZ,KAC1BD,GAAiB,GAKrB,GAAIjZ,EAAQwO,GACVtB,EAAS4L,OACJ,GAAIG,EAAgB,CACzB/L,EAAS4L,EAIT,MAAMxG,EAAWwG,EACX/N,EAAS0N,GAAcnG,EAAUqG,EAAYtB,OAAO7V,EAAQ,IAC5DwJ,EAAQyN,GAAcnG,EAAUqG,EAAYtB,OAAO7V,IAEvDA,EADEwJ,EACMsH,EAASrH,MAAMgD,QAAQjD,GACtBD,EACDuH,EAASrH,MAAMgD,QAAQlD,GAAU,EAEjCuH,EAASrH,MAAM7H,YAIrBhD,EADJ8M,EAASwL,GAAWI,EAAUC,EAAOrC,SACbxJ,EAASA,EAAOzL,OAGtCvB,EAAagN,IAAW5M,EAAc4M,IAAWtN,EAAWsN,GAC9DqB,GAAOuK,EAAU5L,EAAQsB,EAAOhN,GAEhC+M,GAAOuK,EAAU5L,EAAQsB,QAEtB,YH1EYuK,GACrB,OAAOA,EAAOjZ,OAAS0W,GAAW0B,KGyErBiB,CAAOJ,GAAS,CACzB,IAEI7L,EAFAa,EAAWoK,GAAWW,EAAUC,EAAOrC,MACvC1I,EAAcmK,GAAWS,EAASG,EAAOrC,MAGzCtW,EAAW2N,IAAa3N,EAAW4N,IAErCd,EAASa,EACTA,EAAWA,EAAStM,MACpBuM,EAAcA,EAAYvM,OAE1ByL,EAASwL,GAAWI,EAAUC,EAAOrC,MAGvCpN,GAAQwP,EAAU5L,EAAQa,EAAUC,QAC/B,YHjFc+K,GACvB,OAAOA,EAAOjZ,OAAS0W,GAAWe,OGgFrB6B,CAASL,GAAS,CAC3B,IAAI7L,EAASwL,GAAWI,EAAUC,EAAOrC,MACrCtW,EAAW8M,KAASA,EAASA,EAAOzL,OAExC,MAAM5B,EAAOsY,GAAWW,EAAUC,EAAOrC,MAEzC/G,GAAOmJ,EAAU5L,EAAQrN,QACpB,YH9EYkZ,GACrB,OAAOA,EAAOjZ,OAAS0W,GAAWS,KG6ErBoC,CAAON,GAAS,CACzB,IAAI7L,EAASiL,GAAWW,EAAUC,EAAOrC,MACrC3V,EAAQmM,KAASA,EAASA,EAAOtC,MACjCxK,EAAW8M,KAASA,EAASA,EAAOzL,OAExC,MAAM5B,EAAQqN,EAAqBjC,MAAM8N,EAAOzH,MAEhD3B,GAAOmJ,EAAU5L,EAAQrN,GACzB0O,GAAOuK,EAAU5L,EAAQrN,EAAMkZ,EAAO7B,SACjC,YH7Ec6B,GACvB,OAAOA,EAAOjZ,OAAS0W,GAAWwB,OG4ErBL,CAASoB,GAAS,CAC3B,IAAI7L,EAASiL,GAAWW,EAAUC,EAAOrC,KAAKW,OAAO0B,EAAOzH,OAGxDtD,EAAcmK,GAAWS,EAASG,EAAOrC,KAAKW,OAAO0B,EAAO7B,KAI5DnW,EAAQmM,KAASA,EAASA,EAAOtC,MACjC7J,EAAQiN,KAAcA,EAAcA,EAAYpD,MAEpDtB,GAAQwP,EAAU5L,EAAQA,EAAO/F,IAAK6G,EAAY7G,QAItDsJ,GAAYqI,GACLA,EAzGkBQ,CATW,CAClCxZ,KAAMJ,EAASK,SACf6F,IAAK,CAAEvD,MAAO,CAAED,KAAM,EAAGG,OAAQ,GAAKJ,IAAK,CAAEC,KAAM,EAAGG,OAAQ,IAC9D0I,MAAAA,GAGuBqI,GAAQsF,EAASrF,GAC1BkD,GAAKoC,EAAaD,IAIH3N,6BD/BPxJ,EAAY8R,GAEpC,OAAOiB,GADUlB,GAAQ7R,EAAO8R,GACTtI"} \ No newline at end of file diff --git a/node_modules/toml-patch/package.json b/node_modules/toml-patch/package.json new file mode 100644 index 0000000..cf7e767 
--- /dev/null +++ b/node_modules/toml-patch/package.json @@ -0,0 +1,61 @@ +{ + "name": "toml-patch", + "version": "0.2.3", + "author": "Tim Hall ", + "license": "MIT", + "description": "Patch, parse, and stringify TOML", + "homepage": "https://github.com/timhall/toml-patch", + "keywords": [ + "toml", + "patch", + "parse", + "stringify", + "parser", + "encoder", + "decoder" + ], + "main": "dist/toml-patch.cjs.min.js", + "module": "dist/toml-patch.es.js", + "browser": "dist/toml-patch.umd.min.js", + "types": "dist/toml-patch.d.ts", + "scripts": { + "test": "jest", + "typecheck": "tsc", + "specs": "jest --config specs.config.js", + "benchmark": "node benchmark/index.js", + "build": "rimraf dist && rollup -c", + "prepublishOnly": "npm run build" + }, + "devDependencies": { + "@types/glob": "^7", + "@types/jest": "^24", + "@types/js-yaml": "^3", + "benchmark": "^2", + "glob": "^7", + "jest": "^24", + "js-yaml": "^3", + "mri": "^1", + "npm-run-all": "^4", + "rimraf": "^2", + "rollup": "^1", + "rollup-plugin-dts": "^0.14", + "rollup-plugin-filesize": "^6", + "rollup-plugin-terser": "^4", + "rollup-plugin-typescript": "^1", + "ts-jest": "^24", + "tslib": "^1", + "typescript": "^3" + }, + "jest": { + "testEnvironment": "node", + "preset": "ts-jest", + "testRegex": "/__tests__/.*\\.[jt]sx?$" + }, + "prettier": { + "singleQuote": true, + "printWidth": 100 + }, + "files": [ + "dist/" + ] +} diff --git a/package.json b/package.json index 71dfcbd..a500416 100644 --- a/package.json +++ b/package.json @@ -11,7 +11,7 @@ "@actions/core": "^1.4.0", "@actions/exec": "^1.1.0", "@actions/github": "^5.0.0", - "@iarna/toml": "^2.2.5", - "standard-version": "^9.3.0" + "standard-version": "^9.3.0", + "toml-patch": "^0.2.3" } } diff --git a/yarn.lock b/yarn.lock index 4a802b6..0a65db7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -57,11 +57,6 @@ chalk "^2.0.0" js-tokens "^4.0.0" -"@iarna/toml@^2.2.5": - version "2.2.5" - resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" - integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== - "@octokit/auth-token@^2.4.4": version "2.4.5" resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-2.4.5.tgz#568ccfb8cb46f36441fac094ce34f7a875b197f3" @@ -1565,6 +1560,11 @@ through@2, "through@>=2.2.7 <3": resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= +toml-patch@^0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/toml-patch/-/toml-patch-0.2.3.tgz#5d6e5884755089c06622aa5fee7458d995520e8a" + integrity sha512-SQI/j7f2S/iQZmoWBWNUMTfVmPbhP3+YCqxB/q8iOjdnXBk29HDARHzjCazmwftZaylYuMSrTTrvJiESVyzJzw== + trim-newlines@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613"
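
Note for reviewers (not part of the patch): the dependency swap in package.json and yarn.lock above only pays off if toml-patch really round-trips the original document's layout. Below is a minimal, illustrative sketch of the behaviour this change relies on, assuming toml-patch exposes parse(text) -> object and patch(text, object) -> text as its package description ("Patch, parse, and stringify TOML") suggests; the sample lockfile snippet and crate name are made up.

// Sketch: check that toml-patch preserves the original Cargo.lock formatting
// when only one version field changes. Assumes `parse` and `patch` exports;
// the TOML content below is a hypothetical example, not a real lockfile.
const toml = require('toml-patch');

const original = [
  '# This file is automatically @generated by Cargo.',
  '[[package]]',
  'name = "my-crate"',
  'version = "0.1.0"',
  '',
].join('\n');

// Parse into a plain object, bump the version, then patch the original text.
const doc = toml.parse(original);
doc.package.find((p) => p.name === 'my-crate').version = '0.2.0';

const updated = toml.patch(original, doc);
console.log(updated);
// Expectation: only the `version = ...` line differs from `original`;
// the leading comment, key order, and [[package]] array-of-tables syntax
// are left untouched, which is what keeps the Cargo.lock diff minimal.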