diff --git a/README.md b/README.md
index 507322e8..0c72b057 100644
--- a/README.md
+++ b/README.md
@@ -21,7 +21,7 @@ from MySQL to PostgreSQL as easy and smooth as possible.

SYSTEM REQUIREMENTS

USAGE

@@ -38,22 +38,35 @@ from MySQL to PostgreSQL as easy and smooth as possible.

4. Go to Nmig directory, install dependencies, compile and run the app
-     Sample:
+ Sample:

$ cd /path/to/nmig

$ npm install

$ npm run build

$ npm start

-

+ Or, if you have moved the config folder out of Nmig's directory:

+
npm start -- --conf-dir='/path/to/nmig_config' --logs-dir='/path/to/nmig_logs'
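
   For instance (all paths below are purely illustrative), moving the config folder out of the package and pointing Nmig at it could look like this:

$ mv /path/to/nmig/config /path/to/nmig_config

$ mkdir -p /path/to/nmig_logs

$ npm start -- --conf-dir='/path/to/nmig_config' --logs-dir='/path/to/nmig_logs'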

5. If a disaster took place during migration (for whatever reason) - simply restart the process
-$ npm start
-    NMIG will restart from the point it was stopped at.
+$ npm start
+Or, if you have moved the config folder out of Nmig's directory:
+$ npm start -- --conf-dir='/path/to/nmig_config' --logs-dir='/path/to/nmig_logs'
+
+    NMIG will restart from the point it was stopped at.

6. At the end of migration check log files, if necessary.
    Log files will be located under "logs_directory" folder in the root of the package.
+   Note: If you've created the nmig_logs folder outside Nmig's directory, then "logs_directory" will reside in nmig_logs.
+
Note: "logs_directory" will be created during script execution.

@@ -65,7 +78,7 @@ from MySQL to PostgreSQL as easy and smooth as possible.

1. Create a new PostgreSQL database.
Sample:  CREATE DATABASE nmig_test_db;
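
   For example, one way to run this statement is via psql (assuming a local server and the default "postgres" superuser - adjust to your setup):

$ psql -U postgres -c "CREATE DATABASE nmig_test_db;"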

-
+

2. Download Nmig package.
Sample: /path/to/nmig

3. Edit configuration file located at /path/to/nmig/config/test_config.json with correct details.

Notes:

4. Go to nmig directory, install dependencies, compile and run tests
-     Sample:
+ Sample:

$ cd /path/to/nmig

$ npm install

$ npm run build

$ npm test

+ Or, if you have moved the config folder out of Nmig's directory:

+
npm test -- --conf-dir='/path/to/nmig_config' --logs-dir='/path/to/nmig_logs'

5. At the end of migration check log files, if necessary.
    Log files will be located under "logs_directory" folder in the root of the package.
-   Note: "logs_directory" will be created during script execution.
+   Note: If you've created the nmig_logs folder outside Nmig's directory, then "logs_directory" will reside in nmig_logs.
+
+   Note: "logs_directory" will be created during script execution.

VERSION

-
-Current version is 5.2.0
-(major version . improvements . bug fixes)
+
+Current version is 5.3.0

LICENSE

NMIG is available under "GNU GENERAL PUBLIC LICENSE" (v. 3)
diff --git a/config/config.json b/config/config.json index 8dd6b37d..36405ca6 100644 --- a/config/config.json +++ b/config/config.json @@ -35,13 +35,13 @@ "password" : "0123456789" }, - "max_db_connection_pool_size_description" : [ - "Maximal amount of simultaneous connections to your MySQL and PostgreSQL servers.", - "For example: 10 means, that nmig will create a pool of 10 connections to MySQL server", - "and 10 connections to PostgreSQL server.", - "20 connections to both servers." + "max_each_db_connection_pool_size_description" : [ + "Maximal amount of simultaneous connections to your MySQL and PostgreSQL servers each.", + "Example: 20 means, that Nmig will create a pool of 20 connections to the source MySQL server", + "and 20 connections to the target PostgreSQL server.", + "40 connections to both servers." ], - "max_db_connection_pool_size" : 10, + "max_each_db_connection_pool_size" : 20, "loader_max_old_space_size_description" : [ "V8 memory limit of the loader process.", @@ -69,15 +69,6 @@ ], "schema" : "public", - "no_vacuum_description" : [ - "PostgreSQL VACUUM reclaims storage occupied by dead tuples.", - "VACUUM is a very time-consuming procedure.", - "By default, VACUUM will be performed automatically after migration (recommended)", - "This behavior can be disabled for tables, included into the list (Array) below.", - "Table names, included into the list below should be names, you use in your source database (MySQL)." - ], - "no_vacuum" : [], - "exclude_tables_description": [ "List (Array) of tables, that will not be migrated.", "By default, nmig will migrate all tables." diff --git a/package-lock.json b/package-lock.json index bbbc2dc5..407524d7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5,24 +5,24 @@ "requires": true, "dependencies": { "@types/mysql": { - "version": "2.15.8", - "resolved": "https://registry.npmjs.org/@types/mysql/-/mysql-2.15.8.tgz", - "integrity": "sha512-l0TUdg6KDEaLO75/yjdjksobJDRWv8iZlpRfv/WW1lQZCQDKdTDnKCkeH10oapzP/JTuKiTy6Cvq/sm/0GgcUw==", + "version": "2.15.14", + "resolved": "https://registry.npmjs.org/@types/mysql/-/mysql-2.15.14.tgz", + "integrity": "sha512-YyNcxZZ4qlb9v41lu8Qfo+quX8eUqz4mr/6Nx7sclc2pUemOvjU7xrpuVave0/sR/VdtDPTXZOWQLC/kthxrHg==", "dev": true, "requires": { "@types/node": "*" } }, "@types/node": { - "version": "13.1.7", - "resolved": "https://registry.npmjs.org/@types/node/-/node-13.1.7.tgz", - "integrity": "sha512-HU0q9GXazqiKwviVxg9SI/+t/nAsGkvLDkIdxz+ObejG2nX6Si00TeLqHMoS+a/1tjH7a8YpKVQwtgHuMQsldg==", + "version": "14.0.14", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.0.14.tgz", + "integrity": "sha512-syUgf67ZQpaJj01/tRTknkMNoBBLWJOBODF0Zm4NrXmiSuxjymFrxnTu1QVYRubhVkRcZLYZG8STTwJRdVm/WQ==", "dev": true }, "@types/pg": { - "version": "7.14.1", - "resolved": "https://registry.npmjs.org/@types/pg/-/pg-7.14.1.tgz", - "integrity": "sha512-gQgg4bLuykokypx4O1fwEzl5e6UjjyaBtN3znn5zhm0YB9BnKyHDw+e4cQY9rAPzpdM2qpJbn9TNzUazbmTsdw==", + "version": "7.14.3", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-7.14.3.tgz", + "integrity": "sha512-go5zddQ1FrUQHeBvqPzQ1svKo4KKucSwvqLsvwc/EIuQ9sxDA21b68xc/RwhzAK5pPCnez8NrkYatFIGdJBVvA==", "dev": true, "requires": { "@types/node": "*", @@ -36,14 +36,29 @@ "dev": true }, "@types/tape": { - "version": "4.2.33", - "resolved": "https://registry.npmjs.org/@types/tape/-/tape-4.2.33.tgz", - "integrity": "sha512-ltfyuY5BIkYlGuQfwqzTDT8f0q8Z5DGppvUnWGs39oqDmMd6/UWhNpX3ZMh/VYvfxs3rFGHMrLC/eGRdLiDGuw==", + "version": "4.13.0", + "resolved": 
"https://registry.npmjs.org/@types/tape/-/tape-4.13.0.tgz", + "integrity": "sha512-0V8cKowBdsiA9nbxAg7531sF2cdPZNiUogcfIUeUGm+bejUBE/bvibz3rH36iQP9bQjO/sOzFwU97/uC5mCyoA==", "dev": true, "requires": { "@types/node": "*" } }, + "array-filter": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/array-filter/-/array-filter-1.0.0.tgz", + "integrity": "sha1-uveeYubvTCpMC4MSMtr/7CUfnYM=", + "dev": true + }, + "available-typed-arrays": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.2.tgz", + "integrity": "sha512-XWX3OX8Onv97LMk/ftVyBibpGwY5a8SmuxZPzeOxqmuEqUCOM9ZE+uIaD1VNJ5QnvU2UQusvmKbuM1FR8QWGfQ==", + "dev": true, + "requires": { + "array-filter": "^1.0.0" + } + }, "balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", @@ -51,9 +66,9 @@ "dev": true }, "bignumber.js": { - "version": "7.2.1", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", - "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ==" + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", + "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==" }, "brace-expansion": { "version": "1.1.11", @@ -71,9 +86,9 @@ "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==" }, "commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", + "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==" }, "concat-map": { "version": "0.0.1", @@ -87,17 +102,33 @@ "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, "deep-equal": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.1.1.tgz", - "integrity": "sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-2.0.3.tgz", + "integrity": "sha512-Spqdl4H+ky45I9ByyJtXteOm9CaIrPmnIPmOhrkKGNYWeDgCvJ8jNYVCTjChxW4FqGuZnLHADc8EKRMX6+CgvA==", "dev": true, "requires": { + "es-abstract": "^1.17.5", + "es-get-iterator": "^1.1.0", "is-arguments": "^1.0.4", - "is-date-object": "^1.0.1", - "is-regex": "^1.0.4", - "object-is": "^1.0.1", + "is-date-object": "^1.0.2", + "is-regex": "^1.0.5", + "isarray": "^2.0.5", + "object-is": "^1.1.2", "object-keys": "^1.1.1", - "regexp.prototype.flags": "^1.2.0" + "object.assign": "^4.1.0", + "regexp.prototype.flags": "^1.3.0", + "side-channel": "^1.0.2", + "which-boxed-primitive": "^1.0.1", + "which-collection": "^1.0.1", + "which-typed-array": "^1.1.2" + }, + "dependencies": { + "isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true + } } }, "define-properties": { @@ -125,22 +156,45 @@ } }, "es-abstract": { - "version": "1.17.2", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.2.tgz", - "integrity": 
"sha512-YoKuru3Lyoy7yVTBSH2j7UxTqe/je3dWAruC0sHvZX1GNd5zX8SSLvQqEgO9b3Ex8IW+goFI9arEEsFIbulhOw==", + "version": "1.17.6", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.6.tgz", + "integrity": "sha512-Fr89bON3WFyUi5EvAeI48QTWX0AyekGgLA8H+c+7fbfCkJwRWRMLd8CQedNEyJuoYYhmtEqY92pgte1FAhBlhw==", "dev": true, "requires": { "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", "has": "^1.0.3", "has-symbols": "^1.0.1", - "is-callable": "^1.1.5", - "is-regex": "^1.0.5", + "is-callable": "^1.2.0", + "is-regex": "^1.1.0", "object-inspect": "^1.7.0", "object-keys": "^1.1.1", "object.assign": "^4.1.0", - "string.prototype.trimleft": "^2.1.1", - "string.prototype.trimright": "^2.1.1" + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + }, + "es-get-iterator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.0.tgz", + "integrity": "sha512-UfrmHuWQlNMTs35e1ypnvikg6jCz3SK8v8ImvmDsh36fCVUR1MqoFDiyn0/k52C8NqO3YsO8Oe0azeesNuqSsQ==", + "dev": true, + "requires": { + "es-abstract": "^1.17.4", + "has-symbols": "^1.0.1", + "is-arguments": "^1.0.4", + "is-map": "^2.0.1", + "is-set": "^2.0.1", + "is-string": "^1.0.5", + "isarray": "^2.0.5" + }, + "dependencies": { + "isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true + } } }, "es-to-primitive": { @@ -163,6 +217,12 @@ "is-callable": "^1.1.3" } }, + "foreach": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.5.tgz", + "integrity": "sha1-C+4AUBiusmDQo6865ljdATbsG5k=", + "dev": true + }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -225,10 +285,22 @@ "integrity": "sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA==", "dev": true }, + "is-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.0.tgz", + "integrity": "sha512-t5mGUXC/xRheCK431ylNiSkGGpBp8bHENBcENTkDT6ppwPzEVxNGZRvgvmOEfbWkFhA7D2GEuE2mmQTr78sl2g==", + "dev": true + }, + "is-boolean-object": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.0.1.tgz", + "integrity": "sha512-TqZuVwa/sppcrhUCAYkGBk7w0yxfQQnxq28fjkO53tnK9FQXmdwz2JS5+GjsWQ6RByES1K40nI+yDic5c9/aAQ==", + "dev": true + }, "is-callable": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", - "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.0.tgz", + "integrity": "sha512-pyVD9AaGLxtg6srb2Ng6ynWJqkHU9bEM087AKck0w8QwDarTfNcpIYoU8x8Hv2Icm8u6kFJM18Dag8lyqGkviw==", "dev": true }, "is-date-object": { @@ -237,15 +309,39 @@ "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", "dev": true }, + "is-map": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.1.tgz", + "integrity": "sha512-T/S49scO8plUiAOA2DBTBG3JHpn1yiw0kRp6dgiZ0v2/6twi5eiB0rHtHFH9ZIrvlWc6+4O+m4zg5+Z833aXgw==", + "dev": true + }, + "is-number-object": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.4.tgz", + "integrity": "sha512-zohwelOAur+5uXtk8O3GPQ1eAcu4ZX3UwxQhUlfFFMNpUd83gXgjbhJh6HmB6LUNV/ieOLQuDwJO3dWJosUeMw==", + "dev": true + }, "is-regex": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", - "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.0.tgz", + "integrity": "sha512-iI97M8KTWID2la5uYXlkbSDQIg4F6o1sYboZKKTDpnDQMLtUL86zxhgDet3Q2SriaYsyGqZ6Mn2SjbRKeLHdqw==", "dev": true, "requires": { - "has": "^1.0.3" + "has-symbols": "^1.0.1" } }, + "is-set": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.1.tgz", + "integrity": "sha512-eJEzOtVyenDs1TMzSQ3kU3K+E0GUS9sno+F0OBT97xsgcJsF9nXMBtkT9/kut5JEpM7oL7X/0qxR17K3mcwIAA==", + "dev": true + }, + "is-string": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "dev": true + }, "is-symbol": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", @@ -255,17 +351,41 @@ "has-symbols": "^1.0.1" } }, + "is-typed-array": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.3.tgz", + "integrity": "sha512-BSYUBOK/HJibQ30wWkWold5txYwMUXQct9YHAQJr8fSwvZoiglcqB0pd7vEN23+Tsi9IUEjztdOSzl4qLVYGTQ==", + "dev": true, + "requires": { + "available-typed-arrays": "^1.0.0", + "es-abstract": "^1.17.4", + "foreach": "^2.0.5", + "has-symbols": "^1.0.1" + } + }, + "is-weakmap": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz", + "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==", + "dev": true + }, + "is-weakset": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.1.tgz", + "integrity": "sha512-pi4vhbhVHGLxohUw7PhGsueT4vRGFoXhP7+RGN0jKIv9+8PWYCQTqtADngrxOm2g46hoH0+g8uZZBzMrvVGDmw==", + "dev": true + }, "isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" }, "json2csv": { - "version": "4.5.4", - "resolved": "https://registry.npmjs.org/json2csv/-/json2csv-4.5.4.tgz", - "integrity": "sha512-YxBhY4Lmn8IvVZ36nqg5omxneLy9JlorkqW1j/EDCeqvmi+CQ4uM+wsvXlcIqvGDewIPXMC/O/oF8DX9EH5aoA==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json2csv/-/json2csv-5.0.1.tgz", + "integrity": "sha512-QFMifUX1y8W2tKi2TwZpnzf2rHdZvzdmgZUMEMDF46F90f4a9mUeWfx/qg4kzXSZYJYc3cWA5O+eLXk5lj9g8g==", "requires": { - "commander": "^2.15.1", + "commander": "^5.0.0", "jsonparse": "^1.3.1", "lodash.get": "^4.4.2" } @@ -290,33 +410,37 @@ } }, "minimist": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", - "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", "dev": true }, "mysql": { - "version": "2.17.1", - "resolved": "https://registry.npmjs.org/mysql/-/mysql-2.17.1.tgz", - "integrity": 
"sha512-7vMqHQ673SAk5C8fOzTG2LpPcf3bNt0oL3sFpxPEEFp1mdlDcrLK0On7z8ZYKaaHrHwNcQ/MTUz7/oobZ2OyyA==", + "version": "2.18.1", + "resolved": "https://registry.npmjs.org/mysql/-/mysql-2.18.1.tgz", + "integrity": "sha512-Bca+gk2YWmqp2Uf6k5NFEurwY/0td0cpebAucFpY/3jhrwrVGuxU2uQFCHjU19SJfje0yQvi+rVWdq78hR5lig==", "requires": { - "bignumber.js": "7.2.1", - "readable-stream": "2.3.6", + "bignumber.js": "9.0.0", + "readable-stream": "2.3.7", "safe-buffer": "5.1.2", "sqlstring": "2.3.1" } }, "object-inspect": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", - "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.8.0.tgz", + "integrity": "sha512-jLdtEOB112fORuypAyl/50VRVIBIdVQOSUUGQHzJ4xBSbit81zRarz7GThkEFZy1RceYrWYcPcBFPQwHyAc1gA==", "dev": true }, "object-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.0.2.tgz", - "integrity": "sha512-Epah+btZd5wrrfjkJZq1AOB9O6OxUQto45hzFd7lXGrpHPGE0W1k+426yrZV+k6NJOzLNNW/nVsmZdIWsAqoOQ==", - "dev": true + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.2.tgz", + "integrity": "sha512-5lHCz+0uufF6wZ7CRFWJN3hp8Jqblpgve06U5CMQ3f//6iDjPr2PEo9MWCjEssDsa+UZEL4PkFpr+BMop6aKzQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } }, "object-keys": { "version": "1.1.1", @@ -363,24 +487,24 @@ "dev": true }, "pg": { - "version": "7.17.1", - "resolved": "https://registry.npmjs.org/pg/-/pg-7.17.1.tgz", - "integrity": "sha512-SYWEip6eADsgDQIZk0bmB2JDOrC8Xu6z10KlhlXl03NSomwVmHB6ZTVyDCwOfT6bXHI8QndJdk5XxSSRXikaSA==", + "version": "8.2.1", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.2.1.tgz", + "integrity": "sha512-DKzffhpkWRr9jx7vKxA+ur79KG+SKw+PdjMb1IRhMiKI9zqYUGczwFprqy+5Veh/DCcFs1Y6V8lRLN5I1DlleQ==", "requires": { "buffer-writer": "2.0.0", "packet-reader": "1.0.0", - "pg-connection-string": "0.1.3", - "pg-packet-stream": "^1.1.0", - "pg-pool": "^2.0.9", + "pg-connection-string": "^2.2.3", + "pg-pool": "^3.2.1", + "pg-protocol": "^1.2.4", "pg-types": "^2.1.0", "pgpass": "1.x", "semver": "4.3.2" } }, "pg-connection-string": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-0.1.3.tgz", - "integrity": "sha1-2hhHsglA5C7hSSvq9l1J2RskXfc=" + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.2.3.tgz", + "integrity": "sha512-I/KCSQGmOrZx6sMHXkOs2MjddrYcqpza3Dtsy0AjIgBr/bZiPJRK9WhABXN1Uy1UDazRbi9gZEzO2sAhL5EqiQ==" }, "pg-copy-streams": { "version": "2.2.2", @@ -392,15 +516,15 @@ "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==" }, - "pg-packet-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/pg-packet-stream/-/pg-packet-stream-1.1.0.tgz", - "integrity": "sha512-kRBH0tDIW/8lfnnOyTwKD23ygJ/kexQVXZs7gEyBljw4FYqimZFxnMMx50ndZ8In77QgfGuItS5LLclC2TtjYg==" - }, "pg-pool": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-2.0.9.tgz", - "integrity": "sha512-gNiuIEKNCT3OnudQM2kvgSnXsLkSpd6mS/fRnqs6ANtrke6j8OY5l9mnAryf1kgwJMWLg0C1N1cYTZG1xmEYHQ==" + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.2.1.tgz", + 
"integrity": "sha512-BQDPWUeKenVrMMDN9opfns/kZo4lxmSWhIqo+cSAF7+lfi9ZclQbr9vfnlNaPr8wYF3UYjm5X0yPAhbcgqNOdA==" + }, + "pg-protocol": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.2.4.tgz", + "integrity": "sha512-/8L/G+vW/VhWjTGXpGh8XVkXOFx1ZDY+Yuz//Ab8CfjInzFkreI+fDG3WjCeSra7fIZwAFxzbGptNbm8xSXenw==" }, "pg-types": { "version": "2.2.0", @@ -433,9 +557,9 @@ "integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=" }, "postgres-date": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.4.tgz", - "integrity": "sha512-bESRvKVuTrjoBluEcpv2346+6kgB7UlnqWZsnbnCccTNq/pqfj1j6oBaN5+b/NrDXepYUT/HKadqv3iS9lJuVA==" + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.5.tgz", + "integrity": "sha512-pdau6GRPERdAYUQwkBnGKxEfPyhVZXG/JiS44iZWiNdSOWE09N2lUgN6yshuq6fVSon4Pm0VMXd1srUUkLe9iA==" }, "postgres-interval": { "version": "1.2.0", @@ -451,9 +575,9 @@ "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", @@ -475,9 +599,9 @@ } }, "resolve": { - "version": "1.14.2", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.14.2.tgz", - "integrity": "sha512-EjlOBLBO1kxsUxsKjLt7TAECyKW6fOh1VRkykQkKGzcBbjjPIxBqGh0jf7GJ3k/f5mxMqW3htMD3WdTUVtW8HQ==", + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz", + "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==", "dev": true, "requires": { "path-parse": "^1.0.6" @@ -502,6 +626,16 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-4.3.2.tgz", "integrity": "sha1-x6BxWKgL7dBSNVt3DYLWZA+AO+c=" }, + "side-channel": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.2.tgz", + "integrity": "sha512-7rL9YlPHg7Ancea1S96Pa8/QWb4BtXL/TZvS6B8XFetGBeuhAsfmUspK6DokBeZ64+Kj9TCNRD/30pVz1BvQNA==", + "dev": true, + "requires": { + "es-abstract": "^1.17.0-next.1", + "object-inspect": "^1.7.0" + } + }, "split": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", @@ -526,24 +660,24 @@ "function-bind": "^1.1.1" } }, - "string.prototype.trimleft": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz", - "integrity": "sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag==", + "string.prototype.trimend": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.1.tgz", + "integrity": "sha512-LRPxFUaTtpqYsTeNKaFOw3R4bxIzWOnbQ837QfBylo8jIxtcbK/A/sMV7Q+OAV/vWo+7s25pOE10KYSjaSO06g==", "dev": true, "requires": { "define-properties": "^1.1.3", - "function-bind": "^1.1.1" + "es-abstract": "^1.17.5" } }, - "string.prototype.trimright": { - "version": "2.1.1", - "resolved": 
"https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz", - "integrity": "sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g==", + "string.prototype.trimstart": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.1.tgz", + "integrity": "sha512-XxZn+QpvrBI1FOcg6dIpxUPgWCPuNXvMD72aaRaUQv1eD4e/Qy8i/hFTe0BUmD60p/QA6bh1avmuPTfNjqVWRw==", "dev": true, "requires": { "define-properties": "^1.1.3", - "function-bind": "^1.1.1" + "es-abstract": "^1.17.5" } }, "string_decoder": { @@ -555,26 +689,28 @@ } }, "tape": { - "version": "4.13.0", - "resolved": "https://registry.npmjs.org/tape/-/tape-4.13.0.tgz", - "integrity": "sha512-J/hvA+GJnuWJ0Sj8Z0dmu3JgMNU+MmusvkCT7+SN4/2TklW18FNCp/UuHIEhPZwHfy4sXfKYgC7kypKg4umbOw==", - "dev": true, - "requires": { - "deep-equal": "~1.1.1", - "defined": "~1.0.0", - "dotignore": "~0.1.2", - "for-each": "~0.3.3", - "function-bind": "~1.1.1", - "glob": "~7.1.6", - "has": "~1.0.3", - "inherits": "~2.0.4", - "is-regex": "~1.0.5", - "minimist": "~1.2.0", - "object-inspect": "~1.7.0", - "resolve": "~1.14.2", - "resumer": "~0.0.0", - "string.prototype.trim": "~1.2.1", - "through": "~2.3.8" + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/tape/-/tape-5.0.1.tgz", + "integrity": "sha512-wVsOl2shKPcjdJdc8a+PwacvrOdJZJ57cLUXlxW4TQ2R6aihXwG0m0bKm4mA4wjtQNTaLMCrYNEb4f9fjHKUYQ==", + "dev": true, + "requires": { + "deep-equal": "^2.0.3", + "defined": "^1.0.0", + "dotignore": "^0.1.2", + "for-each": "^0.3.3", + "function-bind": "^1.1.1", + "glob": "^7.1.6", + "has": "^1.0.3", + "inherits": "^2.0.4", + "is-regex": "^1.0.5", + "minimist": "^1.2.5", + "object-inspect": "^1.7.0", + "object-is": "^1.1.2", + "object.assign": "^4.1.0", + "resolve": "^1.17.0", + "resumer": "^0.0.0", + "string.prototype.trim": "^1.2.1", + "through": "^2.3.8" } }, "through": { @@ -583,9 +719,9 @@ "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" }, "typescript": { - "version": "3.7.5", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.5.tgz", - "integrity": "sha512-/P5lkRXkWHNAbcJIiHPfRoKqyd7bsyCma1hZNUGfn20qm64T6ZBlrzprymeu918H+mB/0rIg2gGK/BXkhhYgBw==", + "version": "3.9.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.5.tgz", + "integrity": "sha512-hSAifV3k+i6lEoCJ2k6R2Z/rp/H3+8sdmcn5NrS3/3kE7+RyZXm9aqvxWqjEXHAd8b0pShatpcdMTvEdvAJltQ==", "dev": true }, "util-deprecate": { @@ -593,6 +729,45 @@ "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" }, + "which-boxed-primitive": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.1.tgz", + "integrity": "sha512-7BT4TwISdDGBgaemWU0N0OU7FeAEJ9Oo2P1PHRm/FCWoEi2VLWC9b6xvxAA3C/NMpxg3HXVgi0sMmGbNUbNepQ==", + "dev": true, + "requires": { + "is-bigint": "^1.0.0", + "is-boolean-object": "^1.0.0", + "is-number-object": "^1.0.3", + "is-string": "^1.0.4", + "is-symbol": "^1.0.2" + } + }, + "which-collection": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", + "integrity": "sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==", + "dev": true, + "requires": { + "is-map": "^2.0.1", + "is-set": "^2.0.1", + "is-weakmap": "^2.0.1", + "is-weakset": "^2.0.1" + } + }, + "which-typed-array": { + "version": "1.1.2", + "resolved": 
"https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.2.tgz", + "integrity": "sha512-KT6okrd1tE6JdZAy3o2VhMoYPh3+J6EMZLyrxBQsZflI1QCZIxMrIYLkosd8Twf+YfknVIHmYQPgJt238p8dnQ==", + "dev": true, + "requires": { + "available-typed-arrays": "^1.0.2", + "es-abstract": "^1.17.5", + "foreach": "^2.0.5", + "function-bind": "^1.1.1", + "has-symbols": "^1.0.1", + "is-typed-array": "^1.1.3" + } + }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", diff --git a/package.json b/package.json index fcee297b..8cef5759 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "nmig", - "version": "5.2.0", + "version": "5.3.0", "description": "The database migration app", "author": "Anatoly Khaytovich", "license": "GPL-3.0", @@ -9,21 +9,21 @@ "url": "https://github.com/AnatolyUss/nmig.git" }, "engines": { - "node": ">=8.0.0" + "node": ">=10.0.0" }, "dependencies": { - "json2csv": "^4.5.4", - "mysql": "^2.17.1", - "pg": "^7.17.1", + "json2csv": "^5.0.1", + "mysql": "^2.18.1", + "pg": "^8.2.1", "pg-copy-streams": "^2.2.2" }, "devDependencies": { - "@types/mysql": "^2.15.8", - "@types/node": "^13.1.7", - "@types/pg": "^7.14.1", - "@types/tape": "^4.2.33", - "tape": "^4.13.0", - "typescript": "^3.7.5" + "@types/mysql": "^2.15.14", + "@types/node": "^14.0.14", + "@types/pg": "^7.14.3", + "@types/tape": "^4.13.0", + "tape": "^5.0.1", + "typescript": "^3.9.5" }, "scripts": { "build": "tsc", diff --git a/src/BootProcessor.ts b/src/BootProcessor.ts index 9264dac2..44512c99 100644 --- a/src/BootProcessor.ts +++ b/src/BootProcessor.ts @@ -18,11 +18,13 @@ * * @author Anatoly Khaytovich */ +import * as path from 'path'; import Conversion from './Conversion'; import DBAccess from './DBAccess'; import DBAccessQueryResult from './DBAccessQueryResult'; import DBVendors from './DBVendors'; import IDBAccessQueryParams from './IDBAccessQueryParams'; +import IConfAndLogsPaths from './IConfAndLogsPaths'; import { getStateLogsTableName } from './MigrationStateManager'; /** @@ -95,24 +97,50 @@ export function boot(conversion: Conversion): Promise { console.log(logo + message); - process - .stdin - .resume() - .setEncoding(conversion._encoding) - .on('data', (stdin: string) => { - const trimedStdin: string = stdin.trim(); + const _getUserInput = (input: string) => { + const trimedInput: string = input.trim(); + + if (trimedInput === 'n' || trimedInput === 'N') { + console.log('\t--[boot] Migration aborted.\n'); + process.exit(0); + } + + if (trimedInput === 'y' || trimedInput === 'Y') { + process.stdin.removeListener('data', _getUserInput); + conversion._timeBegin = new Date(); + return resolve(conversion); + } - if (trimedStdin === 'n' || trimedStdin === 'N') { - console.log('\t--[boot] Migration aborted.\n'); - process.exit(0); - } + const hint: string = `\t--[boot] Unexpected input ${ trimedInput }\n` + + `\t--[boot] Expected input is upper case Y\n\t--[boot] or lower case n\n${message}`; - if (trimedStdin === 'y' || trimedStdin === 'Y') { - return resolve(conversion); - } + console.log(hint); + }; - const hint: string = `\t--[boot] Unexpected input ${ trimedStdin }\n\t--[boot] Expected input is upper case Y\n\t--[boot] or lower case n\n${ message }`; - console.log(hint); - }); + process.stdin + .resume() + .setEncoding(conversion._encoding) + .on('data', _getUserInput); }); } + +/** + * Parses CLI input arguments, if given. + * Returns an object containing paths to configuration files and to logs directory. 
+ * + * Sample: + * npm start -- --conf-dir='C:\Users\anatolyuss\Documents\projects\nmig_config' --logs-dir='C:\Users\anatolyuss\Documents\projects\nmig_logs' + * npm test -- --conf-dir='C:\Users\anatolyuss\Documents\projects\nmig_config' --logs-dir='C:\Users\anatolyuss\Documents\projects\nmig_logs' + */ +export function getConfAndLogsPaths(): IConfAndLogsPaths { + const baseDir: string = path.join(__dirname, '..', '..'); + const _parseInputArguments = (paramName: string) => { + const _path: string | undefined = process.argv.find((arg: string) => arg.startsWith(paramName)); + return _path ? _path.split('=')[1] : undefined; + }; + + return { + confPath: _parseInputArguments('--conf-dir') || path.join(baseDir, 'config'), + logsPath: _parseInputArguments('--logs-dir') || baseDir + }; +} diff --git a/src/ColumnsDataArranger.ts b/src/ColumnsDataArranger.ts index 6e1e2bf2..5700de29 100644 --- a/src/ColumnsDataArranger.ts +++ b/src/ColumnsDataArranger.ts @@ -18,6 +18,7 @@ * * @author Anatoly Khaytovich */ +import { Encoding } from './Encoding'; /** * Defines if given type is one of MySQL spacial types. @@ -50,10 +51,22 @@ const isDateTime = (type: string): boolean => { return type.indexOf('timestamp') !== -1 || type.indexOf('date') !== -1; }; +/** + * Defines if given type is one of MySQL numeric types. + */ +const isNumeric = (type: string): boolean => { + return type.indexOf('decimal') !== -1 + || type.indexOf('numeric') !== -1 + || type.indexOf('double') !== -1 + || type.indexOf('float') !== -1 + || type.indexOf('int') !== -1 + || type.indexOf('point') !== -1; +}; + /** * Arranges columns data before loading. */ -export default (arrTableColumns: any[], mysqlVersion: string | number): string => { +export default (arrTableColumns: any[], mysqlVersion: string | number, encoding: Encoding): string => { let strRetVal: string = ''; const wkbFunc: string = mysqlVersion >= 5.76 ? 'ST_AsWKB' : 'AsWKB'; @@ -70,10 +83,14 @@ export default (arrTableColumns: any[], mysqlVersion: string | number): string = strRetVal += `BIN(\`${ field }\`) AS \`${ field }\`,`; } else if (isDateTime(type)) { strRetVal += `IF(\`${ field }\` IN('0000-00-00', '0000-00-00 00:00:00'), '-INFINITY', CAST(\`${ field }\` AS CHAR)) AS \`${ field }\`,`; + } else if (isNumeric(type)) { + strRetVal += `\`${ field }\` AS \`${ field }\`,`; + } else if (encoding === 'utf-8' || encoding === 'utf8') { + strRetVal += `REPLACE(\`${ field }\`, '\0', '') AS \`${ field }\`,`; } else { strRetVal += `\`${ field }\` AS \`${ field }\`,`; } }); return strRetVal.slice(0, -1); -} +}; diff --git a/src/ConstraintsProcessor.ts b/src/ConstraintsProcessor.ts index 1dd13de6..e9c8f01f 100644 --- a/src/ConstraintsProcessor.ts +++ b/src/ConstraintsProcessor.ts @@ -19,10 +19,7 @@ * @author Anatoly Khaytovich */ import * as sequencesProcessor from './SequencesProcessor'; -import * as dataPoolManager from './DataPoolManager'; -import runVacuumFullAndAnalyze from './VacuumProcessor'; import * as migrationStateManager from './MigrationStateManager'; -import generateReport from './ReportGenerator'; import processEnum from './EnumProcessor'; import processNull from './NullProcessor'; import processDefault from './DefaultProcessor'; @@ -35,26 +32,17 @@ import Conversion from './Conversion'; /** * Continues migration process after data loading. 
*/ -export default async function(conversion: Conversion): Promise { +export async function processConstraints(conversion: Conversion): Promise { const isTableConstraintsLoaded: boolean = await migrationStateManager.get(conversion, 'per_table_constraints_loaded'); const migrateOnlyData: boolean = conversion.shouldMigrateOnlyData(); - const promises: Promise[] = conversion._tablesToMigrate.map(async (tableName: string) => { - if (!isTableConstraintsLoaded) { - if (migrateOnlyData) { - return sequencesProcessor.setSequenceValue(conversion, tableName); - } + if (!isTableConstraintsLoaded) { + const promises: Promise[] = conversion._tablesToMigrate.map(async (tableName: string) => { + await processConstraintsPerTable(conversion, tableName, migrateOnlyData); + }); - await processEnum(conversion, tableName); - await processNull(conversion, tableName); - await processDefault(conversion, tableName); - await sequencesProcessor.createSequence(conversion, tableName); - await processIndexAndKey(conversion, tableName); - await processComments(conversion, tableName); - } - }); - - await Promise.all(promises); + await Promise.all(promises); + } if (migrateOnlyData) { await migrationStateManager.set(conversion, 'per_table_constraints_loaded', 'foreign_keys_loaded', 'views_loaded'); @@ -66,10 +54,25 @@ export default async function(conversion: Conversion): Promise { await migrationStateManager.set(conversion, 'views_loaded'); } - await runVacuumFullAndAnalyze(conversion); // Reclaim storage occupied by dead tuples. + return conversion; +} + +/** + * Processes given table's constraints. + */ +export async function processConstraintsPerTable( + conversion: Conversion, + tableName: string, + migrateOnlyData: boolean +): Promise { + if (migrateOnlyData) { + return sequencesProcessor.setSequenceValue(conversion, tableName); + } - // !!!Note, dropping of data-pool and state-logs tables MUST be the last step of migration process. - await dataPoolManager.dropDataPoolTable(conversion); - await migrationStateManager.dropStateLogsTable(conversion); - generateReport(conversion, 'NMIG migration is accomplished.'); + await processEnum(conversion, tableName); + await processNull(conversion, tableName); + await processDefault(conversion, tableName); + await sequencesProcessor.createSequence(conversion, tableName); + await processIndexAndKey(conversion, tableName); + await processComments(conversion, tableName); } diff --git a/src/Conversion.ts b/src/Conversion.ts index ebadf556..9a4accd9 100644 --- a/src/Conversion.ts +++ b/src/Conversion.ts @@ -46,9 +46,9 @@ export default class Conversion { public _loaderMaxOldSpaceSize: number | string; /** - * Maximal amount of simultaneous connections to your MySQL and PostgreSQL servers. + * Maximal amount of simultaneous connections to your MySQL and PostgreSQL servers each. */ - public readonly _maxDbConnectionPoolSize: number; + public readonly _maxEachDbConnectionPoolSize: number; /** * JavaScript encoding type. @@ -95,11 +95,6 @@ export default class Conversion { */ public readonly _notCreatedViewsPath: string; - /** - * A list of tables, to which PostgreSQL's VACUUM will not be applied at the end of migration. - */ - public readonly _noVacuum: string[]; - /** * List of tables, that will not be migrated. */ @@ -113,7 +108,7 @@ export default class Conversion { /** * The timestamp, at which the migration began. */ - public readonly _timeBegin: Date; + public _timeBegin: Date | null; /** * Current version of source (MySQL) db. 
@@ -208,10 +203,9 @@ export default class Conversion { this._allLogsPath = path.join(this._logsDirPath, 'all.log'); this._errorLogsPath = path.join(this._logsDirPath, 'errors-only.log'); this._notCreatedViewsPath = path.join(this._logsDirPath, 'not_created_views'); - this._noVacuum = this._config.no_vacuum === undefined ? [] : this._config.no_vacuum; this._excludeTables = this._config.exclude_tables === undefined ? [] : this._config.exclude_tables; this._includeTables = this._config.include_tables === undefined ? [] : this._config.include_tables; - this._timeBegin = new Date(); + this._timeBegin = null; this._encoding = this._config.encoding === undefined ? 'utf8' : this._config.encoding; this._0777 = '0777'; this._mysqlVersion = '5.6.21'; // Simply a default value. @@ -227,15 +221,15 @@ export default class Conversion { ? this._mySqlDbName : this._config.schema; - this._maxDbConnectionPoolSize = this._config.max_db_connection_pool_size !== undefined && Conversion._isIntNumeric(this._config.max_db_connection_pool_size) - ? +this._config.max_db_connection_pool_size - : 10; + this._maxEachDbConnectionPoolSize = this._config.max_each_db_connection_pool_size !== undefined && Conversion._isIntNumeric(this._config.max_each_db_connection_pool_size) + ? +this._config.max_each_db_connection_pool_size + : 20; this._runsInTestMode = false; this._eventEmitter = null; this._migrationCompletedEvent = 'migrationCompleted'; this._removeTestResources = this._config.remove_test_resources === undefined ? true : this._config.remove_test_resources; - this._maxDbConnectionPoolSize = this._maxDbConnectionPoolSize > 0 ? this._maxDbConnectionPoolSize : 10; + this._maxEachDbConnectionPoolSize = this._maxEachDbConnectionPoolSize > 0 ? this._maxEachDbConnectionPoolSize : 20; this._loaderMaxOldSpaceSize = this._config.loader_max_old_space_size; this._loaderMaxOldSpaceSize = Conversion._isIntNumeric(this._loaderMaxOldSpaceSize) ? this._loaderMaxOldSpaceSize : 'DEFAULT'; this._migrateOnlyData = this._config.migrate_only_data === undefined ? false : this._config.migrate_only_data; diff --git a/src/DBAccess.ts b/src/DBAccess.ts index 0b2289b0..6d5bd08a 100644 --- a/src/DBAccess.ts +++ b/src/DBAccess.ts @@ -21,7 +21,7 @@ import * as mysql from 'mysql'; import { MysqlError, Pool as MySQLPool, PoolConnection } from 'mysql'; import { Pool as PgPool, PoolClient } from 'pg'; -import { generateError } from './FsOps'; +import { log, generateError } from './FsOps'; import Conversion from './Conversion'; import DBVendors from './DBVendors'; import DBAccessQueryResult from './DBAccessQueryResult'; @@ -33,7 +33,7 @@ export default class DBAccess { */ private static async _getMysqlConnection(conversion: Conversion): Promise { if (!conversion._mysql) { - conversion._sourceConString.connectionLimit = conversion._maxDbConnectionPoolSize; + conversion._sourceConString.connectionLimit = conversion._maxEachDbConnectionPoolSize; conversion._sourceConString.multipleStatements = true; const pool: MySQLPool = mysql.createPool(conversion._sourceConString); @@ -51,7 +51,7 @@ export default class DBAccess { */ private static async _getPgConnection(conversion: Conversion): Promise { if (!conversion._pg) { - conversion._targetConString.max = conversion._maxDbConnectionPoolSize; + conversion._targetConString.max = conversion._maxEachDbConnectionPoolSize; const pool: PgPool = new PgPool(conversion._targetConString); if (!pool) { @@ -68,6 +68,41 @@ export default class DBAccess { } } + /** + * Closes both connection-pools. 
+ */ + public static async closeConnectionPools(conversion: Conversion): Promise { + const closeMySqlConnections = () => { + return new Promise(resolve => { + if (conversion._mysql) { + conversion._mysql.end(async error => { + if (error) { + await generateError(conversion, `\t--[DBAccess::closeConnectionPools] ${ error }`); + } + + return resolve(); + }); + } + + resolve(); + }); + }; + + const closePgConnections = async () => { + if (conversion._pg) { + try { + await conversion._pg.end(); + } catch (error) { + await generateError(conversion, `\t--[DBAccess::closeConnectionPools] ${ error }`); + } + } + }; + + await Promise.all([closeMySqlConnections, closePgConnections]); + log(conversion, `\t--[DBAccess::closeConnectionPools] Closed all DB connections.`); + return conversion; + } + /** * Obtains PoolConnection instance. */ diff --git a/src/DataChunksProcessor.ts b/src/DataChunksProcessor.ts index 16192adf..fd540556 100644 --- a/src/DataChunksProcessor.ts +++ b/src/DataChunksProcessor.ts @@ -38,7 +38,7 @@ export default async (conversion: Conversion, tableName: string, haveDataChunksP const originalTableName: string = extraConfigProcessor.getTableName(conversion, tableName, true); const logTitle: string = 'DataChunksProcessor::default'; - const strSelectFieldList: string = arrangeColumnsData(conversion._dicTables[tableName].arrTableColumns, conversion._mysqlVersion); + const selectFieldList: string = arrangeColumnsData(conversion._dicTables[tableName].arrTableColumns, conversion._mysqlVersion, conversion._encoding); const sqlRowsCnt: string = `SELECT COUNT(1) AS rows_count FROM \`${ originalTableName }\`;`; const params: IDBAccessQueryParams = { conversion: conversion, @@ -53,7 +53,11 @@ export default async (conversion: Conversion, tableName: string, haveDataChunksP const rowsCnt: number = countResult.data[0].rows_count; const msg: string = `\t--[${ logTitle }] Total rows to insert into "${ conversion._schema }"."${ tableName }": ${ rowsCnt }`; log(conversion, msg, conversion._dicTables[tableName].tableLogPath); - const metadata: string = `{"_tableName":"${ tableName }","_selectFieldList":"${ strSelectFieldList }","_rowsCnt":${ rowsCnt }}`; + const metadata: string = JSON.stringify({ + _tableName: tableName, + _selectFieldList: selectFieldList, + _rowsCnt: rowsCnt, + }); params.sql = `INSERT INTO ${ getDataPoolTableName(conversion) }("metadata") VALUES ($1);`; params.vendor = DBVendors.PG; diff --git a/src/DataPipeManager.ts b/src/DataPipeManager.ts index a5930b3a..cf96d299 100644 --- a/src/DataPipeManager.ts +++ b/src/DataPipeManager.ts @@ -23,11 +23,11 @@ import { EventEmitter } from 'events'; import * as path from 'path'; import * as os from 'os'; import { log, generateError } from './FsOps'; +import { processConstraintsPerTable } from './ConstraintsProcessor'; +import * as migrationStateManager from './MigrationStateManager'; import Conversion from './Conversion'; import MessageToDataLoader from './MessageToDataLoader'; import MessageToMaster from './MessageToMaster'; -import processConstraints from './ConstraintsProcessor'; -import decodeBinaryData from './BinaryDataDecoder'; /** * A number of currently running loader processes. @@ -35,9 +35,9 @@ import decodeBinaryData from './BinaryDataDecoder'; let loaderProcessesCount: number = 0; /** - * "dataPoolEmpty" event. + * "tableLoadingFinished" event. */ -const dataPoolEmptyEvent: string = 'dataPoolEmpty'; +const tableLoadingFinishedEvent: string = 'tableLoadingFinished'; /** * An EventEmitter instance. 
@@ -45,39 +45,77 @@ const dataPoolEmptyEvent: string = 'dataPoolEmpty'; const eventEmitter: EventEmitter = new EventEmitter(); /** - * Runs the data pipe. + * A path to the DataLoader.js file. + * !!!Notice, in runtime it points to ../dist/src/DataLoader.js and not DataLoader.ts */ -export default async function(conversion: Conversion): Promise { - if (dataPoolProcessed(conversion)) { - await continueConversionProcess(conversion); - return; - } +const dataLoaderPath: string = path.join(__dirname, 'DataLoader.js'); - // Register a listener for the "dataPoolEmpty" event. - eventEmitter.on(dataPoolEmptyEvent, async () => { - if (loaderProcessesCount === 0) { - // On the event of "dataPoolEmpty" check a number of active loader processes. - // If no active loader processes found, then all the data is transferred, so Nmig can proceed to the next step. - await continueConversionProcess(conversion); - } - }); +/** + * Returns the options object, which intended to be used upon creation of the data loader process. + */ +function getDataLoaderOptions(conversion: Conversion): any { + const options: any = Object.create(null); - // Determine a number of simultaneously running loader processes. - // In most cases it will be a number of logical CPU cores on the machine running Nmig; - // unless a number of tables in the source database is smaller. - const numberOfSimultaneouslyRunningLoaderProcesses: number = Math.min(conversion._dataPool.length, getNumberOfCpus()); + if (conversion._loaderMaxOldSpaceSize !== 'DEFAULT') { + options.execArgv = [`--max-old-space-size=${ conversion._loaderMaxOldSpaceSize }`]; + } - for (let i: number = 0; i < numberOfSimultaneouslyRunningLoaderProcesses; ++i) { - runLoaderProcess(conversion); + return options; +} + +/** + * Kills a process specified by the pid. + */ +async function killProcess(pid: number, conversion: Conversion): Promise { + try { + process.kill(pid); + } catch (killError) { + await generateError(conversion, `\t--[killProcess] ${ killError }`); } } /** - * Continues the conversion process upon data transfer completion. + * Checks if all data chunks were processed. */ -async function continueConversionProcess(conversion: Conversion): Promise { - await decodeBinaryData(conversion); - await processConstraints(conversion); +function dataPoolProcessed(conversion: Conversion): boolean { + return conversion._dataPool.length === 0; +} + +/** + * Runs the data pipe. + */ +export default function(conversion: Conversion): Promise { + return new Promise(resolve => { + if (dataPoolProcessed(conversion)) { + return resolve(conversion); + } + + // Register a listener for the "tableLoadingFinished" event. + eventEmitter.on(tableLoadingFinishedEvent, async tableName => { + await processConstraintsPerTable(conversion, tableName, conversion.shouldMigrateOnlyData()); + + // Check a number of active loader processes on the event of "tableLoadingFinished". + // If no active loader processes found, then all the data is transferred, + // hence Nmig can proceed to the next step. + if (loaderProcessesCount === 0) { + await migrationStateManager.set(conversion, 'per_table_constraints_loaded'); + return resolve(conversion); + } + }); + + // Calculate a number of data-loader processes that will run simultaneously. + // In most cases it will be a number of logical CPU cores on the machine running Nmig; + // unless a number of tables in the source database or the maximal number of DB connections is smaller. 
+ const numberOfSimultaneouslyRunningLoaderProcesses: number = Math.min( + conversion._dataPool.length, + conversion._maxEachDbConnectionPoolSize, + os.cpus().length + ); + + for (let i: number = 0; i < numberOfSimultaneouslyRunningLoaderProcesses; ++i) { + runLoaderProcess(conversion); + } + }); } /** @@ -85,75 +123,30 @@ async function continueConversionProcess(conversion: Conversion): Promise */ function runLoaderProcess(conversion: Conversion): void { if (dataPoolProcessed(conversion)) { - // Emit the "dataPoolEmpty" event if there are no more data to transfer. - eventEmitter.emit(dataPoolEmptyEvent); + // No more data to transfer. return; } - // Start a new data loader process. - const loaderProcess: ChildProcess = fork(getDataLoaderPath(), getDataLoaderOptions(conversion)); + // Start a new data-loader process. + const loaderProcess: ChildProcess = fork(dataLoaderPath, getDataLoaderOptions(conversion)); loaderProcessesCount++; loaderProcess.on('message', async (signal: MessageToMaster) => { // Following actions are performed when a message from the loader process is accepted: // 1. Log an info regarding the just-populated table. // 2. Kill the loader process to release unused RAM as quick as possible. - // 3. Call the "runLoaderProcess" function recursively to transfer next data-chunk. - const msg: string = `\t--[pipeData] For now inserted: ${ signal.totalRowsToInsert } rows,` + // 3. Emit the "tableLoadingFinished" event to start constraints creation for the just loaded table immediately. + // 4. Call the "runLoaderProcess" function recursively to transfer data to the next table. + const msg: string = `\t--[runLoaderProcess] For now inserted: ${ signal.totalRowsToInsert } rows,` + `Total rows to insert into "${ conversion._schema }"."${ signal.tableName }": ${ signal.totalRowsToInsert }`; log(conversion, msg); await killProcess(loaderProcess.pid, conversion); loaderProcessesCount--; + eventEmitter.emit(tableLoadingFinishedEvent, signal.tableName); runLoaderProcess(conversion); }); - // Sends a message to current data loader process, which contains configuration info and a metadata of next data-chunk. + // Sends a message to current data loader process, which contains configuration info and a metadata of the next data-chunk. loaderProcess.send(new MessageToDataLoader(conversion._config, conversion._dataPool.pop())); } - -/** - * Returns a path to the DataLoader.js file. - * !!!Note, in runtime it points to ../dist/src/DataLoader.js and not DataLoader.ts - */ -function getDataLoaderPath(): string { - return path.join(__dirname, 'DataLoader.js'); -} - -/** - * Returns the options object, which intended to be used upon creation of the data loader process. - */ -function getDataLoaderOptions(conversion: Conversion): any { - const options: any = Object.create(null); - - if (conversion._loaderMaxOldSpaceSize !== 'DEFAULT') { - options.execArgv = [`--max-old-space-size=${ conversion._loaderMaxOldSpaceSize }`]; - } - - return options; -} - -/** - * Returns a number of logical CPU cores. - */ -function getNumberOfCpus(): number { - return os.cpus().length; -} - -/** - * Kills a process specified by the pid. - */ -async function killProcess(pid: number, conversion: Conversion): Promise { - try { - process.kill(pid); - } catch (killError) { - await generateError(conversion, `\t--[killProcess] ${ killError }`); - } -} - -/** - * Checks if all data chunks were processed. 
- */ -function dataPoolProcessed(conversion: Conversion): boolean { - return conversion._dataPool.length === 0; -} diff --git a/src/DataPoolManager.ts b/src/DataPoolManager.ts index ec738dc8..af66224e 100644 --- a/src/DataPoolManager.ts +++ b/src/DataPoolManager.ts @@ -56,7 +56,7 @@ export async function createDataPoolTable(conversion: Conversion): Promise { +export async function dropDataPoolTable(conversion: Conversion): Promise { const logTitle: string = 'DataPoolManager::dropDataPoolTable'; const table: string = getDataPoolTableName(conversion); const params: IDBAccessQueryParams = { @@ -70,6 +70,7 @@ export async function dropDataPoolTable(conversion: Conversion): Promise { await DBAccess.query(params); log(conversion, `\t--[${ logTitle }] table ${ table } is dropped...`); + return conversion; } /** diff --git a/src/FsOps.ts b/src/FsOps.ts index ff60c5a7..7bfd9a11 100644 --- a/src/FsOps.ts +++ b/src/FsOps.ts @@ -49,7 +49,7 @@ export function generateError(conversion: Conversion, message: string, sql: stri * Writes given log to the "/all.log" file. * If necessary, writes given log to the "/{tableName}.log" file. */ -export function log(conversion: Conversion, log: string | NodeJS.ErrnoException, tableLogPath?: string): void { +export function log(conversion: Conversion, log: string | NodeJS.ErrnoException, tableLogPath?: string, callback?: Function): void { console.log(log); const buffer: Buffer = Buffer.from(`${ log }\n\n`, conversion._encoding); @@ -63,13 +63,22 @@ export function log(conversion: Conversion, log: string | NodeJS.ErrnoException, fs.write(fd, buffer, 0, buffer.length, null, () => { fs.close(fd, () => { // Each async function MUST have a callback (according to Node.js >= 7). + if (callback) { + callback(); + } }); }); + } else if (callback) { + callback(error); } }); + } else if (callback) { + callback(); } }); }); + } else if (callback) { + callback(error); } }); } @@ -77,19 +86,19 @@ export function log(conversion: Conversion, log: string | NodeJS.ErrnoException, /** * Reads the configuration file. */ -export function readConfig(baseDir: string, configFileName: string = 'config.json'): Promise { +export function readConfig(confPath: string, logsPath: string, configFileName: string = 'config.json'): Promise { return new Promise(resolve => { - const strPathToConfig = path.join(baseDir, 'config', configFileName); + const pathToConfig = path.join(confPath, configFileName); - fs.readFile(strPathToConfig, (error: ErrnoException | null, data: Buffer) => { + fs.readFile(pathToConfig, (error: ErrnoException | null, data: Buffer) => { if (error) { - console.log(`\n\t--Cannot run migration\nCannot read configuration info from ${ strPathToConfig }`); + console.log(`\n\t--Cannot run migration\nCannot read configuration info from ${ pathToConfig }`); process.exit(1); } const config: any = JSON.parse(data.toString()); - config.logsDirPath = path.join(baseDir, 'logs_directory'); - config.dataTypesMapAddr = path.join(baseDir, 'config', 'data_types_map.json'); + config.logsDirPath = path.join(logsPath, 'logs_directory'); + config.dataTypesMapAddr = path.join(confPath, 'data_types_map.json'); resolve(config); }); }); @@ -98,18 +107,18 @@ export function readConfig(baseDir: string, configFileName: string = 'config.jso /** * Reads the extra configuration file, if necessary. 
*/ -export function readExtraConfig(config: any, baseDir: string): Promise { +export function readExtraConfig(config: any, confPath: string): Promise { return new Promise(resolve => { if (config.enable_extra_config !== true) { config.extraConfig = null; return resolve(config); } - const strPathToExtraConfig = path.join(baseDir, 'config', 'extra_config.json'); + const pathToExtraConfig = path.join(confPath, 'config', 'extra_config.json'); - fs.readFile(strPathToExtraConfig, (error: ErrnoException | null, data: Buffer) => { + fs.readFile(pathToExtraConfig, (error: ErrnoException | null, data: Buffer) => { if (error) { - console.log(`\n\t--Cannot run migration\nCannot read configuration info from ${ strPathToExtraConfig }`); + console.log(`\n\t--Cannot run migration\nCannot read configuration info from ${ pathToExtraConfig }`); process.exit(1); } diff --git a/src/IConfAndLogsPaths.ts b/src/IConfAndLogsPaths.ts new file mode 100644 index 00000000..3108ea1b --- /dev/null +++ b/src/IConfAndLogsPaths.ts @@ -0,0 +1,31 @@ +/* + * This file is a part of "NMIG" - the database migration tool. + * + * Copyright (C) 2016 - present, Anatoly Khaytovich + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program (please see the "LICENSE.md" file). + * If not, see . + * + * @author Anatoly Khaytovich + */ +export default interface IConfAndLogsPaths { + /** + * An absolute path to the configuration directory. + */ + confPath: string; + + /** + * An absolute path to the logs directory. 
+ */ + logsPath: string; +} diff --git a/src/Main.ts b/src/Main.ts index af82a305..c104e75a 100644 --- a/src/Main.ts +++ b/src/Main.ts @@ -18,20 +18,24 @@ * * @author Anatoly Khaytovich */ -import * as path from 'path'; import Conversion from './Conversion'; import createSchema from './SchemaProcessor'; import loadStructureToMigrate from './StructureLoader'; import pipeData from './DataPipeManager'; -import { boot } from './BootProcessor'; -import { createStateLogsTable } from './MigrationStateManager'; +import decodeBinaryData from './BinaryDataDecoder'; +import generateReport from './ReportGenerator'; +import DBAccess from './DBAccess'; +import { dropDataPoolTable } from './DataPoolManager'; +import { processConstraints } from './ConstraintsProcessor'; +import { getConfAndLogsPaths, boot } from './BootProcessor'; +import { createStateLogsTable, dropStateLogsTable } from './MigrationStateManager'; import { createDataPoolTable, readDataPool } from './DataPoolManager'; import { readConfig, readExtraConfig, createLogsDirectory, readDataTypesMap } from './FsOps'; -const baseDir: string = path.join(__dirname, '..', '..'); +const { confPath, logsPath } = getConfAndLogsPaths(); -readConfig(baseDir) - .then(config => readExtraConfig(config, baseDir)) +readConfig(confPath, logsPath) + .then(config => readExtraConfig(config, confPath)) .then(Conversion.initializeConversion) .then(boot) .then(readDataTypesMap) @@ -42,4 +46,9 @@ readConfig(baseDir) .then(loadStructureToMigrate) .then(readDataPool) .then(pipeData) - .catch(error => console.log(error)); + .then(decodeBinaryData) + .then(processConstraints) + .then(dropDataPoolTable) + .then(dropStateLogsTable) + .then(DBAccess.closeConnectionPools) + .then(generateReport); diff --git a/src/MigrationStateManager.ts b/src/MigrationStateManager.ts index 5b71c6b1..a40c2b52 100644 --- a/src/MigrationStateManager.ts +++ b/src/MigrationStateManager.ts @@ -106,7 +106,7 @@ export async function createStateLogsTable(conversion: Conversion): Promise { +export async function dropStateLogsTable(conversion: Conversion): Promise { const params: IDBAccessQueryParams = { conversion: conversion, caller: 'MigrationStateManager::dropStateLogsTable', @@ -117,4 +117,5 @@ export async function dropStateLogsTable(conversion: Conversion): Promise }; await DBAccess.query(params); + return conversion; } diff --git a/src/ReportGenerator.ts b/src/ReportGenerator.ts index 2a43bce9..f1d9691c 100644 --- a/src/ReportGenerator.ts +++ b/src/ReportGenerator.ts @@ -18,15 +18,20 @@ * * @author Anatoly Khaytovich */ +import { EventEmitter } from 'events'; import { log } from './FsOps'; import Conversion from './Conversion'; -import { EventEmitter } from 'events'; /** * Generates a summary report. */ -export default (conversion: Conversion, endMsg: string): void => { - let differenceSec: number = ((new Date()).getTime() - conversion._timeBegin.getTime()) / 1000; +export default (conversion: Conversion): void => { + if (conversion._runsInTestMode) { + (conversion._eventEmitter).emit(conversion._migrationCompletedEvent); + return; + } + + let differenceSec: number = ((new Date()).getTime() - (conversion._timeBegin).getTime()) / 1000; const seconds: number = Math.floor(differenceSec % 60); differenceSec = differenceSec / 60; const minutes: number = Math.floor(differenceSec % 60); @@ -34,16 +39,10 @@ export default (conversion: Conversion, endMsg: string): void => { const formattedHours: string = hours < 10 ? `0${ hours }` : `${ hours }`; const formattedMinutes: string = minutes < 10 ? 
diff --git a/src/VacuumProcessor.ts b/src/VacuumProcessor.ts
deleted file mode 100644
index 127d4406..00000000
--- a/src/VacuumProcessor.ts
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * This file is a part of "NMIG" - the database migration tool.
- *
- * Copyright (C) 2016 - present, Anatoly Khaytovich
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program (please see the "LICENSE.md" file).
- * If not, see .
- *
- * @author Anatoly Khaytovich
- */
-import { log } from './FsOps';
-import Conversion from './Conversion';
-import DBAccess from './DBAccess';
-import DBVendors from './DBVendors';
-import DBAccessQueryResult from './DBAccessQueryResult';
-import IDBAccessQueryParams from './IDBAccessQueryParams';
-import * as extraConfigProcessor from './ExtraConfigProcessor';
-
-/**
- * Runs "vacuum full" and "analyze".
- */
-export default async function(conversion: Conversion): Promise {
-    const logTitle: string = 'VacuumProcessor::default';
-
-    const vacuumPromises: Promise[] = conversion._tablesToMigrate.map(async (table: string) => {
-        if (conversion._noVacuum.indexOf(extraConfigProcessor.getTableName(conversion, table, true)) === -1) {
-            const tableName = `"${ conversion._schema }"."${ table }"`;
-            const msg: string = `\t--[${ logTitle }] Running "VACUUM FULL and ANALYZE" query for table ${ tableName }...`;
-
-            log(conversion, msg);
-            const params: IDBAccessQueryParams = {
-                conversion: conversion,
-                caller: logTitle,
-                sql: `VACUUM (FULL, ANALYZE) ${ tableName };`,
-                vendor: DBVendors.PG,
-                processExitOnError: false,
-                shouldReturnClient: false
-            };
-
-            const result: DBAccessQueryResult = await DBAccess.query(params);
-
-            if (!result.error) {
-                const msgSuccess: string = `\t--[${ logTitle }] Table ${ tableName } is VACUUMed...`;
-                log(conversion, msgSuccess);
-            }
-        }
-    });
-
-    await Promise.all(vacuumPromises);
-}
diff --git a/test/TestModules/ColumnTypesTest.ts b/test/TestModules/ColumnTypesTest.ts
index 97f8a965..31f7fead 100644
--- a/test/TestModules/ColumnTypesTest.ts
+++ b/test/TestModules/ColumnTypesTest.ts
@@ -97,7 +97,8 @@ function getExpectedColumnTypes(): Map {
         ['mediumblob', 'bytea'],
         ['tinyblob', 'bytea'],
         ['varbinary', 'bytea'],
-        ['binary', 'bytea']
+        ['binary', 'bytea'],
+        ['null_char_in_varchar', 'character varying']
     ]);
 }
diff --git a/test/TestModules/DataContentTest.ts b/test/TestModules/DataContentTest.ts
index 3cf3e8e9..0f639771 100644
--- a/test/TestModules/DataContentTest.ts
+++ b/test/TestModules/DataContentTest.ts
@@ -67,7 +67,7 @@ export default async function(testSchemaProcessor: TestSchemaProcessor, tape: Te
     tape.equal(data.blob_text, originalTestBlobText);
 
     tape.comment('Test bit column value');
-    tape.equal(data.bit, '1'); // BIT is actually a "bit string", for example: '1110' -> 14
+    tape.equal(data.bit, '1'); // BIT is actually a "bit string".
 
     tape.comment('Test id_test_unique_index column value');
     tape.equal(data.id_test_unique_index, 7384);
diff --git a/test/TestModules/TestSchemaProcessor.ts b/test/TestModules/TestSchemaProcessor.ts
index 3ff569f0..2cf6cfc4 100644
--- a/test/TestModules/TestSchemaProcessor.ts
+++ b/test/TestModules/TestSchemaProcessor.ts
@@ -29,9 +29,13 @@ import DBAccessQueryResult from '../../src/DBAccessQueryResult';
 import createSchema from '../../src/SchemaProcessor';
 import loadStructureToMigrate from '../../src/StructureLoader';
 import pipeData from '../../src/DataPipeManager';
-import { createStateLogsTable } from '../../src/MigrationStateManager';
+import decodeBinaryData from '../../src/BinaryDataDecoder';
+import generateReport from '../../src/ReportGenerator';
+import { dropDataPoolTable } from '../../src/DataPoolManager';
+import { processConstraints } from '../../src/ConstraintsProcessor';
+import { createStateLogsTable, dropStateLogsTable } from '../../src/MigrationStateManager';
 import { createDataPoolTable, readDataPool } from '../../src/DataPoolManager';
-import { checkConnection, getLogo } from '../../src/BootProcessor';
+import { checkConnection, getLogo, getConfAndLogsPaths } from '../../src/BootProcessor';
 import { createLogsDirectory, generateError, log, readConfig, readDataTypesMap, readExtraConfig } from '../../src/FsOps';
 import ErrnoException = NodeJS.ErrnoException;
@@ -240,7 +244,7 @@ export default class TestSchemaProcessor {
         };
 
         const insertParamsKeys: string[] = Object.keys(insertParams).map((k: string) => `\`${ k }\``);
-        const sql: string = `INSERT INTO \`table_a\`(${ insertParamsKeys.join(',') }) VALUES(${ insertParamsKeys.map((k: string) => '?').join(',') });`;
+        const sql: string = `INSERT INTO \`table_a\`(${ insertParamsKeys.join(',') }) VALUES(${ insertParamsKeys.map((_: string) => '?').join(',') });`;
         const params: IDBAccessQueryParams = {
             conversion: this.conversion,
             caller: 'TestSchemaProcessor::_loadTestData',
@@ -260,9 +264,9 @@ export default class TestSchemaProcessor {
      * Initializes Conversion instance.
      */
     public async initializeConversion(): Promise {
-        const baseDir: string = path.join(__dirname, '..', '..', '..');
-        const config: any = await readConfig(baseDir, 'test_config.json');
-        const fullConfig: any = await readExtraConfig(config, baseDir);
+        const { confPath, logsPath } = getConfAndLogsPaths();
+        const config: any = await readConfig(confPath, logsPath, 'test_config.json');
+        const fullConfig: any = await readExtraConfig(config, confPath);
         this.conversion = await Conversion.initializeConversion(fullConfig);
         this.conversion._runsInTestMode = true;
         this.conversion._eventEmitter = new EventEmitter();
@@ -298,6 +302,12 @@ export default class TestSchemaProcessor {
             .then(loadStructureToMigrate)
             .then(readDataPool)
             .then(pipeData)
+            .then(decodeBinaryData)
+            .then(processConstraints)
+            .then(dropDataPoolTable)
+            .then(dropStateLogsTable)
+            .then(DBAccess.closeConnectionPools)
+            .then(generateReport)
             .catch(error => console.log(error));
     }
 }
diff --git a/test/test_schema.sql b/test/test_schema.sql
index eed38ccc..715d2655 100644
--- a/test/test_schema.sql
+++ b/test/test_schema.sql
@@ -39,6 +39,7 @@ CREATE TABLE IF NOT EXISTS `table_a`(
     `mediumblob` MEDIUMBLOB,
     `longblob` LONGBLOB,
     `blob` BLOB,
+    `null_char_in_varchar` VARCHAR(6) NOT NULL DEFAULT x'373300350035',
     PRIMARY KEY(`id_test_sequence`),
     UNIQUE KEY(`id_test_unique_index`),
     UNIQUE INDEX(`id_test_composite_unique_index_1`, `id_test_composite_unique_index_2`),
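The default value of the new null_char_in_varchar column, x'373300350035', decodes to the bytes 0x37 0x33 0x00 0x35 0x00 0x35, i.e. '7', '3', NUL, '5', NUL, '5'. PostgreSQL rejects NUL characters in character varying values, which is what this test column exercises. A minimal sketch of the kind of clean-up such a value needs before it can reach the target column (illustrative only, not the code NMIG ships):

```typescript
// Decode the MySQL hex default and strip NUL characters, since PostgreSQL
// text types cannot store the 0x00 byte.
const mysqlValue: string = Buffer.from('373300350035', 'hex').toString('utf8');

const sanitizeForPg = (value: string): string => value.replace(/\u0000/g, '');

console.log(sanitizeForPg(mysqlValue)); // prints "7355"
```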