diff --git a/.github/ISSUE_TEMPLATE/01-bug.yml b/.github/ISSUE_TEMPLATE/01-bug.yml new file mode 100644 index 0000000000000..63b0634e6facd --- /dev/null +++ b/.github/ISSUE_TEMPLATE/01-bug.yml @@ -0,0 +1,82 @@ +name: Bug Report +description: Create a bug report to help us improve +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this bug report! + - type: textarea + id: description + attributes: + label: Bug Description + description: A clear and concise description of what the bug is + placeholder: Tell us what you see! + validations: + required: true + - type: textarea + id: reproduction + attributes: + label: To Reproduce + description: Steps to reproduce the behavior + placeholder: | + 1. Go to '...' + 2. Click on '....' + 3. Scroll down to '....' + 4. See error + validations: + required: true + - type: textarea + id: expected + attributes: + label: Expected behavior + description: A clear and concise description of what you expected to happen + validations: + required: true + - type: markdown + attributes: + value: '## Environment' + - type: input + id: os + attributes: + label: Operating System + placeholder: ex. Ubuntu Linux 22.04 + validations: + required: true + - type: input + id: n8n-version + attributes: + label: n8n Version + placeholder: ex. 1.25.0 + validations: + required: true + - type: input + id: nodejs-version + attributes: + label: Node.js Version + placeholder: ex. 18.16.0 + validations: + required: true + - type: dropdown + id: db + attributes: + label: Database + options: + - SQLite (default) + - PostgreSQL + - MySQL + - MariaDB + default: 0 + validations: + required: true + - type: dropdown + id: execution-mode + attributes: + label: Execution mode + description: '[Info](https://docs.n8n.io/hosting/scaling/execution-modes-processes/)' + options: + - main (default) + - queue + - own (deprecated) + default: 0 + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 91b6a5669c937..0000000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: '' -labels: '' -assignees: '' ---- - -**Describe the bug** -A clear and concise description of what the bug is. - -**To Reproduce** -Steps to reproduce the behavior: - -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Environment (please complete the following information):** - -- OS: [e.g. Ubuntu Linux 22.04] -- n8n Version [e.g. 1.0.1] -- Node.js Version [e.g. 18.16.0] -- Database system [e.g. SQLite; n8n uses SQLite as default otherwise changed] -- Operation mode [e.g. own; operation modes are `own`, `main` and `queue`. Default is `main`] - -**Additional context** -Add any other context about the problem here. 
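Note on the dropdown fields in the new issue form above: in GitHub issue forms, the `default` attribute of a `dropdown` block is expected to take the zero-based index of the entry in `options` to preselect, so `default: 0` should preselect the first option (e.g. "SQLite (default)"), while `validations.required` forces the reporter to pick a value. A minimal illustrative sketch of that shape follows; it mirrors the template above and is not an additional file in this change:

  # illustrative GitHub issue form dropdown (not part of this diff)
  - type: dropdown
    id: db
    attributes:
      label: Database
      options:
        - SQLite (default)   # index 0
        - PostgreSQL         # index 1
      default: 0             # zero-based index into `options`; preselects "SQLite (default)"
    validations:
      required: true         # the issue cannot be submitted without a selection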
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index c2caf18ab3202..8b62ca7bbbffc 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -6,3 +6,6 @@ contact_links: - name: Question / Problem url: https://community.n8n.io about: Questions and problems with n8n + - name: n8n Security Vulnerability + url: https://n8n.io/legal/#vulnerability + about: Learn about our Vulnerability Disclosure Policy diff --git a/.github/scripts/check-tests.mjs b/.github/scripts/check-tests.mjs index 6c0f37b2aa640..1e2e38e05b064 100644 --- a/.github/scripts/check-tests.mjs +++ b/.github/scripts/check-tests.mjs @@ -1,11 +1,10 @@ -import fs from 'fs'; +import { readFile } from 'fs/promises'; import path from 'path'; import util from 'util'; import { exec } from 'child_process'; import { glob } from 'glob'; import ts from 'typescript'; -const readFileAsync = util.promisify(fs.readFile); const execAsync = util.promisify(exec); const filterAsync = async (asyncPredicate, arr) => { @@ -37,7 +36,7 @@ const isAbstractMethod = (node) => { // Function to check if a file has a function declaration, function expression, object method or class const hasFunctionOrClass = async (filePath) => { - const fileContent = await readFileAsync(filePath, 'utf-8'); + const fileContent = await readFile(filePath, 'utf-8'); const sourceFile = ts.createSourceFile(filePath, fileContent, ts.ScriptTarget.Latest, true); let hasFunctionOrClass = false; diff --git a/.github/workflows/check-documentation-urls.yml b/.github/workflows/check-documentation-urls.yml index 42d7f317de7f9..b14daec6b056c 100644 --- a/.github/workflows/check-documentation-urls.yml +++ b/.github/workflows/check-documentation-urls.yml @@ -16,8 +16,7 @@ jobs: steps: - uses: actions/checkout@v4.1.1 - - uses: pnpm/action-setup@v2.4.0 - + - run: corepack enable - uses: actions/setup-node@v4.0.1 with: node-version: 18.x diff --git a/.github/workflows/check-issue-template.yml b/.github/workflows/check-issue-template.yml deleted file mode 100644 index 7eeff27a3708b..0000000000000 --- a/.github/workflows/check-issue-template.yml +++ /dev/null @@ -1,15 +0,0 @@ -name: Check Issue Template - -on: - issues: - types: [opened, edited] - -jobs: - check-issue: - name: Check Issue Template - runs-on: ubuntu-latest - steps: - - name: Run Check Issue Template - uses: n8n-io/validate-issue-template-content@v1 - with: - repo-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/check-pr-title.yml b/.github/workflows/check-pr-title.yml index 77d8defe4b5fc..a66ae1232f27a 100644 --- a/.github/workflows/check-pr-title.yml +++ b/.github/workflows/check-pr-title.yml @@ -18,8 +18,7 @@ jobs: - name: Check out branch uses: actions/checkout@v4.1.1 - - uses: pnpm/action-setup@v2.4.0 - + - run: corepack enable - uses: actions/setup-node@v4.0.1 with: node-version: 18.x @@ -30,6 +29,6 @@ jobs: - name: Validate PR title id: validate_pr_title - uses: n8n-io/validate-n8n-pull-request-title@v2.0.0 + uses: n8n-io/validate-n8n-pull-request-title@v2.0.1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/ci-master.yml b/.github/workflows/ci-master.yml index ba6547c696a87..41ee830bd58ae 100644 --- a/.github/workflows/ci-master.yml +++ b/.github/workflows/ci-master.yml @@ -18,8 +18,7 @@ jobs: steps: - uses: actions/checkout@v4.1.1 - - uses: pnpm/action-setup@v2.4.0 - + - run: corepack enable - name: Use Node.js ${{ matrix.node-version }} uses: actions/setup-node@v4.0.1 with: @@ -64,8 +63,7 @@ jobs: repository: 
n8n-io/n8n ref: ${{ inputs.branch }} - - uses: pnpm/action-setup@v2.4.0 - + - run: corepack enable - name: Use Node.js ${{ matrix.node-version }} uses: actions/setup-node@v4.0.1 with: diff --git a/.github/workflows/ci-postgres-mysql.yml b/.github/workflows/ci-postgres-mysql.yml index 16c526034252e..b8d0395277286 100644 --- a/.github/workflows/ci-postgres-mysql.yml +++ b/.github/workflows/ci-postgres-mysql.yml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4.1.1 - - uses: pnpm/action-setup@v2.4.0 + - run: corepack enable - uses: actions/setup-node@v4.0.1 with: node-version: 18.x @@ -43,7 +43,7 @@ jobs: DB_MYSQLDB_PASSWORD: password steps: - uses: actions/checkout@v4.1.1 - - uses: pnpm/action-setup@v2.4.0 + - run: corepack enable - uses: actions/setup-node@v4.0.1 with: node-version: 18.x @@ -76,7 +76,7 @@ jobs: DB_POSTGRESDB_PASSWORD: password steps: - uses: actions/checkout@v4.1.1 - - uses: pnpm/action-setup@v2.4.0 + - run: corepack enable - uses: actions/setup-node@v4.0.1 with: node-version: 18.x diff --git a/.github/workflows/ci-pull-requests.yml b/.github/workflows/ci-pull-requests.yml index 4f966c8ff71fd..17bfb05891725 100644 --- a/.github/workflows/ci-pull-requests.yml +++ b/.github/workflows/ci-pull-requests.yml @@ -12,8 +12,7 @@ jobs: repository: n8n-io/n8n ref: refs/pull/${{ github.event.pull_request.number }}/merge - - uses: pnpm/action-setup@v2.4.0 - + - run: corepack enable - name: Use Node.js 18 uses: actions/setup-node@v4.0.1 with: @@ -50,8 +49,7 @@ jobs: repository: n8n-io/n8n ref: refs/pull/${{ github.event.pull_request.number }}/merge - - uses: pnpm/action-setup@v2.4.0 - + - run: corepack enable - name: Use Node.js 18 uses: actions/setup-node@v4.0.1 with: diff --git a/.github/workflows/release-create-pr.yml b/.github/workflows/release-create-pr.yml index aebfaa5693075..612ef87920667 100644 --- a/.github/workflows/release-create-pr.yml +++ b/.github/workflows/release-create-pr.yml @@ -35,7 +35,7 @@ jobs: fetch-depth: 0 ref: ${{ github.event.inputs.base-branch }} - - uses: pnpm/action-setup@v2.4.0 + - run: corepack enable - uses: actions/setup-node@v4.0.1 with: node-version: 18.x diff --git a/.github/workflows/release-publish.yml b/.github/workflows/release-publish.yml index 3d8c103e50981..f63166b6b5950 100644 --- a/.github/workflows/release-publish.yml +++ b/.github/workflows/release-publish.yml @@ -23,7 +23,7 @@ jobs: with: fetch-depth: 0 - - uses: pnpm/action-setup@v2.4.0 + - run: corepack enable - uses: actions/setup-node@v4.0.1 with: node-version: 18.x diff --git a/.github/workflows/test-workflows.yml b/.github/workflows/test-workflows.yml index fcb70bd7aa05f..7856b42f4fd92 100644 --- a/.github/workflows/test-workflows.yml +++ b/.github/workflows/test-workflows.yml @@ -23,9 +23,8 @@ jobs: repository: n8n-io/test-workflows path: test-workflows - - uses: pnpm/action-setup@v2.4.0 - with: - package_json_file: n8n/package.json + - run: corepack enable + working-directory: n8n - uses: actions/setup-node@v4.0.1 with: diff --git a/.github/workflows/units-tests-reusable.yml b/.github/workflows/units-tests-reusable.yml index f6325ab2869cd..386612678c1e5 100644 --- a/.github/workflows/units-tests-reusable.yml +++ b/.github/workflows/units-tests-reusable.yml @@ -35,8 +35,7 @@ jobs: repository: n8n-io/n8n ref: ${{ inputs.ref }} - - uses: pnpm/action-setup@v2.4.0 - + - run: corepack enable - name: Use Node.js ${{ inputs.nodeVersion }} uses: actions/setup-node@v4.0.1 with: diff --git a/CHANGELOG.md b/CHANGELOG.md index 08cd3c2320ed5..df9da9b409c29 100644 
--- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,77 @@ +# [1.28.0](https://github.com/n8n-io/n8n/compare/n8n@1.27.0...n8n@1.28.0) (2024-02-07) + + +### Bug Fixes + +* Allow Date/Luxon objects and additional formats in DateTime validation ([#8525](https://github.com/n8n-io/n8n/issues/8525)) ([c419c85](https://github.com/n8n-io/n8n/commit/c419c8592f16a002d9f460c555db5f8bc04d95b8)) +* **core:** Ensure AxiosError status always gets copied over to NodeApiError ([#8509](https://github.com/n8n-io/n8n/issues/8509)) ([76c5a62](https://github.com/n8n-io/n8n/commit/76c5a62f521fbd79a47d47f419fd90dcc6ca0275)) +* **core:** Fix DropRoleMapping migration ([#8521](https://github.com/n8n-io/n8n/issues/8521)) ([4fed68e](https://github.com/n8n-io/n8n/commit/4fed68ee34c239ed3cae8541d5d4e7f86cf28d13)) +* **core:** Fix new graceful shutdown env being always overridden by deprecated env ([#8503](https://github.com/n8n-io/n8n/issues/8503)) ([cc41fc7](https://github.com/n8n-io/n8n/commit/cc41fc7c801ecb1b0de4b7c5af5a03b3f30fdce8)) +* **core:** Fix PermissionChecker.check, and add additional unit tests ([#8528](https://github.com/n8n-io/n8n/issues/8528)) ([5832d3c](https://github.com/n8n-io/n8n/commit/5832d3ca4695ec812e028e40b41811ca2215c0e2)) +* **core:** Fix test runs of triggers that rely on static data ([#8524](https://github.com/n8n-io/n8n/issues/8524)) ([528c071](https://github.com/n8n-io/n8n/commit/528c07134a6705c8c7a5378f15f5e4a4b93234a9)) +* **core:** Fix workflow tagging failure due to unique constraint check ([#8505](https://github.com/n8n-io/n8n/issues/8505)) ([92f939f](https://github.com/n8n-io/n8n/commit/92f939f82799975a5f9b859e7f342f3440961320)) +* **core:** Upgrade nodemailer to address an exploit ([#8535](https://github.com/n8n-io/n8n/issues/8535)) ([ee5e422](https://github.com/n8n-io/n8n/commit/ee5e4220945fde5baaec9ad66ff08b8728912aa3)) +* **core:** Use hostname from URL instead of Host header for SNI ([#8562](https://github.com/n8n-io/n8n/issues/8562)) ([7531f34](https://github.com/n8n-io/n8n/commit/7531f343861d91df075b8f3220f5bce8858b117f)) +* **core:** Use trx manager instead of repository for tags overwrite ([#8557](https://github.com/n8n-io/n8n/issues/8557)) ([abddbb6](https://github.com/n8n-io/n8n/commit/abddbb622798bdc4a2b11347a09f10aaf03a4639)) +* **editor:** Prune values that are not in the schema in the ResourceMapper component ([#8478](https://github.com/n8n-io/n8n/issues/8478)) ([612771e](https://github.com/n8n-io/n8n/commit/612771e0328f7e565d5f075cc20ad86bd1f13bce)) +* **Embeddings OpenAI Node:** Fix dynamic models fetching ([#8533](https://github.com/n8n-io/n8n/issues/8533)) ([cccdfc7](https://github.com/n8n-io/n8n/commit/cccdfc73d6c13a37e395fdc2612f2ebf458a4f52)) +* **HTTP Request Node:** Require parameter with filled name and value to avoid infinite loop ([#8454](https://github.com/n8n-io/n8n/issues/8454)) ([3128dca](https://github.com/n8n-io/n8n/commit/3128dca1faeff85d77a28640b7dfe8fbcd85db4f)) +* **HTTP Request Node:** Support form data when using pagination ([#8497](https://github.com/n8n-io/n8n/issues/8497)) ([ca75744](https://github.com/n8n-io/n8n/commit/ca75744c7f93db827ece5bf3b17b82f07d4cffb1)) +* **Microsoft Excel 365 Node:** Upsert append new rows at the end of used range, option to append at the end of selected range ([#8461](https://github.com/n8n-io/n8n/issues/8461)) ([1e02d73](https://github.com/n8n-io/n8n/commit/1e02d73ad782fb21cdd9b7350e34beb731a677c5)) +* **MongoDB Node:** Fix "Maximum call stack size exceeded" error on too many rows 
([#8530](https://github.com/n8n-io/n8n/issues/8530)) ([76cdf75](https://github.com/n8n-io/n8n/commit/76cdf75fb96fb55b4a6e7a1a9edc7c9674806391)) +* **Slack Node:** Attachments fix ([#8471](https://github.com/n8n-io/n8n/issues/8471)) ([254700a](https://github.com/n8n-io/n8n/commit/254700a059a48a66dea8b9d80e61c8250e09d5b5)) +* Update BaseChatModel import checks for MistralAI compatibility ([#8527](https://github.com/n8n-io/n8n/issues/8527)) ([c8b8379](https://github.com/n8n-io/n8n/commit/c8b83790150b9974d6d99f6a2b6b5b7be6fb8c53)) + + +### Features + +* Add assignment component with drag and drop to Set node ([#8283](https://github.com/n8n-io/n8n/issues/8283)) ([2799de4](https://github.com/n8n-io/n8n/commit/2799de491b753e6fb00f73b61393392b6fc8ad18)) +* Azure Open AI chat model & embeddings ([#8522](https://github.com/n8n-io/n8n/issues/8522)) ([934d0d3](https://github.com/n8n-io/n8n/commit/934d0d35b1814c0a39595d61a30fb9c2e05fd995)) +* **editor:** Add delete and disable button to nodes on hover ([#8482](https://github.com/n8n-io/n8n/issues/8482)) ([994754b](https://github.com/n8n-io/n8n/commit/994754bf39976c5bb33fd1c30a0eb82cc518850b)) +* **Email Trigger (IMAP) Node:** Upgrade mailparser ([#8539](https://github.com/n8n-io/n8n/issues/8539)) ([da1fe44](https://github.com/n8n-io/n8n/commit/da1fe44d5246848e2ba7bb8bc5f4577685fbcee0)) +* **RabbitMQ Trigger Node:** Add options to configure assert of exchanges and queues ([#8430](https://github.com/n8n-io/n8n/issues/8430)) ([4b3659f](https://github.com/n8n-io/n8n/commit/4b3659f04f84ef774d31cf2341c5dbb500a73afe)) + + + +# [1.27.0](https://github.com/n8n-io/n8n/compare/n8n@1.26.0...n8n@1.27.0) (2024-01-31) + + +### Bug Fixes + +* **AwsS3 Node:** Fix handling of bucket with dot in name ([#8475](https://github.com/n8n-io/n8n/issues/8475)) ([0febe62](https://github.com/n8n-io/n8n/commit/0febe62ad03f82b85922e0cb66d60eeb22b3a919)) +* **core:** Don't report executions that have been paused as failed to rudderstack and log streams ([#8501](https://github.com/n8n-io/n8n/issues/8501)) ([39e8754](https://github.com/n8n-io/n8n/commit/39e875478488f3c81147944bd6b52dc1f9def958)) +* **core:** Fix stopping and retrying failed executions ([#8480](https://github.com/n8n-io/n8n/issues/8480)) ([238b54c](https://github.com/n8n-io/n8n/commit/238b54c77bba6f7abcc7fc2b3ac48a85206ce37e)) +* **core:** Forward authorization header when on same domain ([#8507](https://github.com/n8n-io/n8n/issues/8507)) ([f1910a1](https://github.com/n8n-io/n8n/commit/f1910a10a6ac875b422d9efe9bfd3ca728ac8d96)) +* **core:** Handle possibly invalid `updatedAt` timestamps in source-control ([#8485](https://github.com/n8n-io/n8n/issues/8485)) ([033fd34](https://github.com/n8n-io/n8n/commit/033fd344b5a09a7b9c0b941279da1744e182cc57)) +* **core:** Handle zero execution statistics on metrics collection during license renewal ([#8463](https://github.com/n8n-io/n8n/issues/8463)) ([db48bdd](https://github.com/n8n-io/n8n/commit/db48bdd6d1110bfccc371ca490281c021227991f)) +* **core:** Improve handling of wrapped errors ([#8510](https://github.com/n8n-io/n8n/issues/8510)) ([670af16](https://github.com/n8n-io/n8n/commit/670af167e6f2c0e4e45bcc7e0998a371d651f89d)) +* **core:** Point users to the official documentation when they use `n8n --help` ([#8440](https://github.com/n8n-io/n8n/issues/8440)) ([9f11eba](https://github.com/n8n-io/n8n/commit/9f11eba0a4f726cc150b05c0f4b528bf25513e6f)) +* **core:** Prevent calling internal hook email event if emailing is disabled ([#8462](https://github.com/n8n-io/n8n/issues/8462)) 
([9e93980](https://github.com/n8n-io/n8n/commit/9e939809575592622f6bdca112da1905ac9205ef)) +* **editor:** Disable expression editor modal opening on readonly field ([#8457](https://github.com/n8n-io/n8n/issues/8457)) ([eb27ed0](https://github.com/n8n-io/n8n/commit/eb27ed068ba21bbf4302686f0f0c0168e91c03f6)) +* **editor:** Fix workflows filter resetting ([#8411](https://github.com/n8n-io/n8n/issues/8411)) ([ad4b298](https://github.com/n8n-io/n8n/commit/ad4b298be34915718b0415322e328a1b46ef5842)) +* **editor:** Send template id as a number in telemetry events ([#8484](https://github.com/n8n-io/n8n/issues/8484)) ([327cc8d](https://github.com/n8n-io/n8n/commit/327cc8df7343b806bee87faaa86ed22d9d70127f)) +* **editor:** Show pin button on binary output but disable it with tooltip ([#8388](https://github.com/n8n-io/n8n/issues/8388)) ([caab97e](https://github.com/n8n-io/n8n/commit/caab97e667df5d305aa1d2e15c0d31eb5f1a84eb)) +* **Gotify Node:** Add option to set content type to support Markdown messages ([#8442](https://github.com/n8n-io/n8n/issues/8442)) ([c2ffd4e](https://github.com/n8n-io/n8n/commit/c2ffd4e6455b383e4ddc2eb310cefbcdf867d622)) +* **HTML Node:** Escape data path value in JSON Property ([#8441](https://github.com/n8n-io/n8n/issues/8441)) ([fc5c562](https://github.com/n8n-io/n8n/commit/fc5c5627850bf618be4ca0d9cdb20adb0f8610e9)) +* **Merge Node:** Passing on no items to "Input 2" results in wrong output items ([#8438](https://github.com/n8n-io/n8n/issues/8438)) ([dafacb9](https://github.com/n8n-io/n8n/commit/dafacb90c6b01e2f88c3de359ebb2d8d55e0aecc)) +* **Microsoft Excel 365 Node:** Better error and description on unsupported range in upsert, update, getRange operations ([#8452](https://github.com/n8n-io/n8n/issues/8452)) ([8a595d1](https://github.com/n8n-io/n8n/commit/8a595d1527bf8cd97ce8293c6a52929eb3335750)) +* Open executions with large number of execution items without crashing tab ([#8423](https://github.com/n8n-io/n8n/issues/8423)) ([56da2e4](https://github.com/n8n-io/n8n/commit/56da2e43528f157c421e97018366b8f2d854d11d)) +* Properly iterate over credentials with expressions ([#8502](https://github.com/n8n-io/n8n/issues/8502)) ([0e9a5a2](https://github.com/n8n-io/n8n/commit/0e9a5a2ab2cde251cf106b149bdd4c3142e52b40)) +* Use correct node version when pasting/importing nodes ([#8456](https://github.com/n8n-io/n8n/issues/8456)) ([70af67e](https://github.com/n8n-io/n8n/commit/70af67e744c709b85b600811b792def83518fa7c)) + + +### Features + +* Add model parameter to OpenAI embeddings ([#8481](https://github.com/n8n-io/n8n/issues/8481)) ([981ea39](https://github.com/n8n-io/n8n/commit/981ea3930e96c3b45267fa7ddac48710846e49ac)) +* Add new 'is empty' and 'is not empty' operators to Filter ([#8445](https://github.com/n8n-io/n8n/issues/8445)) ([c21c4b9](https://github.com/n8n-io/n8n/commit/c21c4b9178b3ae328ff7a068663eeb64fed3e465)) +* **core:** Remove `own` execution-process mode ([#8490](https://github.com/n8n-io/n8n/issues/8490)) ([121a55b](https://github.com/n8n-io/n8n/commit/121a55b691469e7eb042737573c0ace276366ecb)) +* **core:** Upgrade Rudderstack SDK to address CVE-2023-45857 ([#8368](https://github.com/n8n-io/n8n/issues/8368)) ([2fba0e8](https://github.com/n8n-io/n8n/commit/2fba0e8d585aead43eaeb73fa49dc9b613900675)) +* **editor:** Implement loading and error states for dynamically loaded components in node parameter list ([#8477](https://github.com/n8n-io/n8n/issues/8477)) ([e643a12](https://github.com/n8n-io/n8n/commit/e643a126f40dbad0634e5abb1e3ba355bad0275d)) +* **editor:** Send 
template id as string in all telemetry events ([#8498](https://github.com/n8n-io/n8n/issues/8498)) ([2aed788](https://github.com/n8n-io/n8n/commit/2aed788dc354595b070e91fe76298f6702bbbe15)) +* **Google Calendar Node:** Next occurrence property in recurring events ([#8444](https://github.com/n8n-io/n8n/issues/8444)) ([bf11c7c](https://github.com/n8n-io/n8n/commit/bf11c7c1bd5826ba64acc665da4e3319f9a47174)) + + + # [1.26.0](https://github.com/n8n-io/n8n/compare/n8n@1.25.0...n8n@1.26.0) (2024-01-24) diff --git a/cypress/composables/workflow.ts b/cypress/composables/workflow.ts index b1810943a3f25..1518805c6be41 100644 --- a/cypress/composables/workflow.ts +++ b/cypress/composables/workflow.ts @@ -106,14 +106,19 @@ export function addSupplementalNodeToParent( nodeName: string, endpointType: EndpointType, parentNodeName: string, + exactMatch = false, ) { getAddInputEndpointByType(parentNodeName, endpointType).click({ force: true }); - getNodeCreatorItems().contains(nodeName).click(); + if (exactMatch) { + getNodeCreatorItems().contains(new RegExp("^" + nodeName + "$", "g")).click(); + } else { + getNodeCreatorItems().contains(nodeName).click(); + } getConnectionBySourceAndTarget(parentNodeName, nodeName).should('exist'); } -export function addLanguageModelNodeToParent(nodeName: string, parentNodeName: string) { - addSupplementalNodeToParent(nodeName, 'ai_languageModel', parentNodeName); +export function addLanguageModelNodeToParent(nodeName: string, parentNodeName: string, exactMatch = false) { + addSupplementalNodeToParent(nodeName, 'ai_languageModel', parentNodeName, exactMatch); } export function addMemoryNodeToParent(nodeName: string, parentNodeName: string) { diff --git a/cypress/e2e/12-canvas-actions.cy.ts b/cypress/e2e/12-canvas-actions.cy.ts index 91f6b65884138..3c517b6c9840f 100644 --- a/cypress/e2e/12-canvas-actions.cy.ts +++ b/cypress/e2e/12-canvas-actions.cy.ts @@ -28,6 +28,8 @@ describe('Canvas Actions', () => { WorkflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME); WorkflowPage.getters.nodeViewBackground().click(600, 200, { force: true }); cy.get('.jtk-connector').should('have.length', 1); + + WorkflowPage.getters.nodeViewBackground().click(600, 400, { force: true }); WorkflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME); // Change connection from Set to Set1 @@ -154,17 +156,43 @@ describe('Canvas Actions', () => { WorkflowPage.getters.nodeConnections().should('have.length', 0); }); - it('should execute node', () => { - WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); - WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); - WorkflowPage.getters - .canvasNodes() - .last() - .find('[data-test-id="execute-node-button"]') - .click({ force: true }); - WorkflowPage.getters.successToast().should('contain', 'Node executed successfully'); - WorkflowPage.actions.executeNode(CODE_NODE_NAME); - WorkflowPage.getters.successToast().should('contain', 'Node executed successfully'); + describe('Node hover actions', () => { + it('should execute node', () => { + WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); + WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); + WorkflowPage.getters + .canvasNodes() + .last() + .findChildByTestId('execute-node-button') + .click({ force: true }); + WorkflowPage.actions.executeNode(CODE_NODE_NAME); + WorkflowPage.getters.successToast().should('have.length', 2); + WorkflowPage.getters.successToast().should('contain.text', 'Node executed successfully'); + }); + + it('should disable and enable node', () => { + 
WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); + WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); + const disableButton = WorkflowPage.getters + .canvasNodes() + .last() + .findChildByTestId('disable-node-button'); + disableButton.click({ force: true }); + WorkflowPage.getters.disabledNodes().should('have.length', 1); + disableButton.click({ force: true }); + WorkflowPage.getters.disabledNodes().should('have.length', 0); + }); + + it('should delete node', () => { + WorkflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME); + WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); + WorkflowPage.getters + .canvasNodes() + .last() + .find('[data-test-id="delete-node-button"]') + .click({ force: true }); + WorkflowPage.getters.canvasNodes().should('have.length', 1); + }); }); it('should copy selected nodes', () => { diff --git a/cypress/e2e/12-canvas.cy.ts b/cypress/e2e/12-canvas.cy.ts index 359f0cf85140c..c1e06c107d636 100644 --- a/cypress/e2e/12-canvas.cy.ts +++ b/cypress/e2e/12-canvas.cy.ts @@ -313,21 +313,38 @@ describe('Canvas Node Manipulation and Navigation', () => { WorkflowPage.actions.addNodeToCanvas(CODE_NODE_NAME); cy.get('body').type('{esc}'); cy.get('body').type('{esc}'); - WorkflowPage.actions.selectAll(); // Keyboard shortcut + WorkflowPage.actions.selectAll(); WorkflowPage.actions.hitDisableNodeShortcut(); WorkflowPage.getters.disabledNodes().should('have.length', 2); WorkflowPage.actions.hitDisableNodeShortcut(); WorkflowPage.getters.disabledNodes().should('have.length', 0); + WorkflowPage.actions.deselectAll(); + WorkflowPage.getters.canvasNodeByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); + WorkflowPage.actions.hitDisableNodeShortcut(); + WorkflowPage.getters.disabledNodes().should('have.length', 1); + WorkflowPage.actions.selectAll(); + WorkflowPage.actions.hitDisableNodeShortcut(); + WorkflowPage.getters.disabledNodes().should('have.length', 2); // Context menu + WorkflowPage.actions.selectAll(); + WorkflowPage.actions.openContextMenu(); + WorkflowPage.actions.contextMenuAction('toggle_activation'); + WorkflowPage.getters.disabledNodes().should('have.length', 0); WorkflowPage.actions.openContextMenu(); WorkflowPage.actions.contextMenuAction('toggle_activation'); WorkflowPage.getters.disabledNodes().should('have.length', 2); + WorkflowPage.actions.deselectAll(); + WorkflowPage.getters.canvasNodeByName(MANUAL_TRIGGER_NODE_DISPLAY_NAME).click(); WorkflowPage.actions.openContextMenu(); WorkflowPage.actions.contextMenuAction('toggle_activation'); - WorkflowPage.getters.disabledNodes().should('have.length', 0); + WorkflowPage.getters.disabledNodes().should('have.length', 1); + WorkflowPage.actions.selectAll(); + WorkflowPage.actions.openContextMenu(); + WorkflowPage.actions.contextMenuAction('toggle_activation'); + WorkflowPage.getters.disabledNodes().should('have.length', 2); }); it('should rename node (context menu or shortcut)', () => { diff --git a/cypress/e2e/13-pinning.cy.ts b/cypress/e2e/13-pinning.cy.ts index ed77797f789e9..1643363a5f3c9 100644 --- a/cypress/e2e/13-pinning.cy.ts +++ b/cypress/e2e/13-pinning.cy.ts @@ -188,9 +188,9 @@ describe('Data pinning', () => { function setExpressionOnStringValueInSet(expression: string) { cy.get('button').contains('Test step').click(); - cy.get('.fixed-collection-parameter > :nth-child(2) > .button > span').click(); - ndv.getters.nthParam(4).contains('Expression').invoke('show').click(); + ndv.getters.assignmentCollectionAdd('assignments').click(); + 
ndv.getters.assignmentValue('assignments').contains('Expression').invoke('show').click(); ndv.getters .inlineExpressionEditorInput() diff --git a/cypress/e2e/14-data-transformation-expressions.cy.ts b/cypress/e2e/14-data-transformation-expressions.cy.ts index 454f1d1749fff..da08ec8817dc7 100644 --- a/cypress/e2e/14-data-transformation-expressions.cy.ts +++ b/cypress/e2e/14-data-transformation-expressions.cy.ts @@ -1,4 +1,5 @@ import { WorkflowPage, NDV } from '../pages'; +import { getVisibleSelect } from '../utils'; const wf = new WorkflowPage(); const ndv = new NDV(); @@ -104,8 +105,6 @@ describe('Data transformation expressions', () => { const addEditFields = () => { wf.actions.addNodeToCanvas('Edit Fields', true, true); - cy.get('.fixed-collection-parameter > :nth-child(2) > .button > span').click(); - ndv.getters.parameterInput('include').click(); // shorten output - cy.get('div').contains('No Input Fields').click(); - ndv.getters.nthParam(4).contains('Expression').invoke('show').click(); + ndv.getters.assignmentCollectionAdd('assignments').click(); + ndv.getters.assignmentValue('assignments').contains('Expression').invoke('show').click(); }; diff --git a/cypress/e2e/16-webhook-node.cy.ts b/cypress/e2e/16-webhook-node.cy.ts index da43a7cf4b14f..d753a143755f8 100644 --- a/cypress/e2e/16-webhook-node.cy.ts +++ b/cypress/e2e/16-webhook-node.cy.ts @@ -181,9 +181,10 @@ describe('Webhook Trigger node', async () => { workflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME); workflowPage.actions.openNode(EDIT_FIELDS_SET_NODE_NAME); - cy.get('.fixed-collection-parameter > :nth-child(2) > .button > span').click(); - ndv.getters.nthParam(2).type('data'); - ndv.getters.nthParam(4).invoke('val', cowBase64).trigger('blur'); + ndv.getters.assignmentCollectionAdd('assignments').click(); + ndv.getters.assignmentName('assignments').type('data'); + ndv.getters.assignmentType('assignments').click(); + ndv.getters.assignmentValue('assignments').paste(cowBase64); ndv.getters.backToCanvas().click(); @@ -311,9 +312,9 @@ describe('Webhook Trigger node', async () => { const addEditFields = () => { workflowPage.actions.addNodeToCanvas(EDIT_FIELDS_SET_NODE_NAME); workflowPage.actions.openNode(EDIT_FIELDS_SET_NODE_NAME); - cy.get('.fixed-collection-parameter > :nth-child(2) > .button > span').click(); - ndv.getters.nthParam(2).type('MyValue'); - ndv.getters.nthParam(3).click(); - cy.get('div').contains('Number').click(); - ndv.getters.nthParam(4).type('1234'); + ndv.getters.assignmentCollectionAdd('assignments').click(); + ndv.getters.assignmentName('assignments').type('MyValue'); + ndv.getters.assignmentType('assignments').click(); + getVisibleSelect().find('li').contains('Number').click(); + ndv.getters.assignmentValue('assignments').type('1234'); }; diff --git a/cypress/e2e/30-langchain.cy.ts b/cypress/e2e/30-langchain.cy.ts index 9140acdef2541..7c74be35b477d 100644 --- a/cypress/e2e/30-langchain.cy.ts +++ b/cypress/e2e/30-langchain.cy.ts @@ -83,6 +83,7 @@ describe('Langchain Integration', () => { addLanguageModelNodeToParent( AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, BASIC_LLM_CHAIN_NODE_NAME, + true ); clickCreateNewCredential(); @@ -121,7 +122,7 @@ describe('Langchain Integration', () => { addNodeToCanvas(MANUAL_CHAT_TRIGGER_NODE_NAME, true); addNodeToCanvas(AGENT_NODE_NAME, true); - addLanguageModelNodeToParent(AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, AGENT_NODE_NAME); + addLanguageModelNodeToParent(AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, AGENT_NODE_NAME, true); 
clickCreateNewCredential(); setCredentialValues({ @@ -159,7 +160,7 @@ describe('Langchain Integration', () => { addNodeToCanvas(MANUAL_CHAT_TRIGGER_NODE_NAME, true); addNodeToCanvas(AGENT_NODE_NAME, true); - addLanguageModelNodeToParent(AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, AGENT_NODE_NAME); + addLanguageModelNodeToParent(AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, AGENT_NODE_NAME, true); clickCreateNewCredential(); setCredentialValues({ diff --git a/cypress/e2e/5-ndv.cy.ts b/cypress/e2e/5-ndv.cy.ts index 0c6d0cc8ddec1..d98bc3e1989ca 100644 --- a/cypress/e2e/5-ndv.cy.ts +++ b/cypress/e2e/5-ndv.cy.ts @@ -306,7 +306,7 @@ describe('NDV', () => { ndv.getters.parameterInput('remoteOptions').click(); - ndv.getters.parameterInputIssues('remoteOptions').realHover({ scrollBehavior: false}); + ndv.getters.parameterInputIssues('remoteOptions').realHover({ scrollBehavior: false }); // Remote options dropdown should not be visible ndv.getters.parameterInput('remoteOptions').find('.el-select').should('not.exist'); }); @@ -509,12 +509,12 @@ describe('NDV', () => { workflowPage.actions.openNode('Edit Fields (old)'); ndv.actions.openSettings(); - ndv.getters.nodeVersion().should('have.text', 'Set node version 2 (Latest version: 3.2)'); + ndv.getters.nodeVersion().should('have.text', 'Set node version 2 (Latest version: 3.3)'); ndv.actions.close(); workflowPage.actions.openNode('Edit Fields (latest)'); ndv.actions.openSettings(); - ndv.getters.nodeVersion().should('have.text', 'Edit Fields (Set) node version 3.2 (Latest)'); + ndv.getters.nodeVersion().should('have.text', 'Edit Fields (Set) node version 3.3 (Latest)'); ndv.actions.close(); workflowPage.actions.openNode('Function'); diff --git a/cypress/fixtures/Test_workflow_ndv_version.json b/cypress/fixtures/Test_workflow_ndv_version.json index 7d8f6af924810..409c8be54b208 100644 --- a/cypress/fixtures/Test_workflow_ndv_version.json +++ b/cypress/fixtures/Test_workflow_ndv_version.json @@ -37,7 +37,7 @@ "id": "93aaadac-55fe-4618-b1eb-f63e61d1446a", "name": "Edit Fields (latest)", "type": "n8n-nodes-base.set", - "typeVersion": 3.2, + "typeVersion": 3.3, "position": [ 1720, 780 diff --git a/cypress/pages/ndv.ts b/cypress/pages/ndv.ts index 449ad75eb45f7..71e756ec11d67 100644 --- a/cypress/pages/ndv.ts +++ b/cypress/pages/ndv.ts @@ -94,6 +94,20 @@ export class NDV extends BasePage { this.getters.filterComponent(paramName).getByTestId('filter-remove-condition').eq(index), filterConditionAdd: (paramName: string) => this.getters.filterComponent(paramName).getByTestId('filter-add-condition'), + assignmentCollection: (paramName: string) => + cy.getByTestId(`assignment-collection-${paramName}`), + assignmentCollectionAdd: (paramName: string) => + this.getters.assignmentCollection(paramName).getByTestId('assignment-collection-drop-area'), + assignment: (paramName: string, index = 0) => + this.getters.assignmentCollection(paramName).getByTestId('assignment').eq(index), + assignmentRemove: (paramName: string, index = 0) => + this.getters.assignment(paramName, index).getByTestId('assignment-remove'), + assignmentName: (paramName: string, index = 0) => + this.getters.assignment(paramName, index).getByTestId('assignment-name'), + assignmentValue: (paramName: string, index = 0) => + this.getters.assignment(paramName, index).getByTestId('assignment-value'), + assignmentType: (paramName: string, index = 0) => + this.getters.assignment(paramName, index).getByTestId('assignment-type-select'), searchInput: () => cy.getByTestId('ndv-search'), pagination: () => 
cy.getByTestId('ndv-data-pagination'), nodeVersion: () => cy.getByTestId('node-version'), @@ -235,6 +249,9 @@ export class NDV extends BasePage { removeFilterCondition: (paramName: string, index: number) => { this.getters.filterConditionRemove(paramName, index).click(); }, + removeAssignment: (paramName: string, index: number) => { + this.getters.assignmentRemove(paramName, index).click(); + }, setInvalidExpression: ({ fieldName, invalidExpression, diff --git a/docker/images/n8n/Dockerfile b/docker/images/n8n/Dockerfile index 83915e568f497..970ec7105b1e8 100644 --- a/docker/images/n8n/Dockerfile +++ b/docker/images/n8n/Dockerfile @@ -8,7 +8,8 @@ ENV N8N_VERSION=${N8N_VERSION} ENV NODE_ENV=production ENV N8N_RELEASE_TYPE=stable RUN set -eux; \ - npm install -g --omit=dev n8n@${N8N_VERSION} && \ + npm install -g --omit=dev n8n@${N8N_VERSION} --ignore-scripts && \ + npm rebuild --prefix=/usr/local/lib/node_modules/n8n sqlite3 && \ rm -rf /usr/local/lib/node_modules/n8n/node_modules/@n8n/chat && \ rm -rf /usr/local/lib/node_modules/n8n/node_modules/n8n-design-system && \ rm -rf /usr/local/lib/node_modules/n8n/node_modules/n8n-editor-ui/node_modules && \ diff --git a/package.json b/package.json index baa1391b4528c..93f2d9376513f 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "n8n-monorepo", - "version": "1.26.0", + "version": "1.28.0", "private": true, "homepage": "https://n8n.io", "engines": { @@ -90,7 +90,6 @@ "tough-cookie": "^4.1.3", "tslib": "^2.6.1", "tsconfig-paths": "^4.2.0", - "ts-node": "^10.9.1", "typescript": "^5.3.0", "xml2js": "^0.5.0", "cpy@8>globby": "^11.1.0", diff --git a/packages/@n8n/client-oauth2/package.json b/packages/@n8n/client-oauth2/package.json index c6e69990ae173..8d4edbf487f72 100644 --- a/packages/@n8n/client-oauth2/package.json +++ b/packages/@n8n/client-oauth2/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/client-oauth2", - "version": "0.13.0", + "version": "0.14.0", "scripts": { "clean": "rimraf dist .turbo", "dev": "pnpm watch", diff --git a/packages/@n8n/nodes-langchain/credentials/AzureOpenAiApi.credentials.ts b/packages/@n8n/nodes-langchain/credentials/AzureOpenAiApi.credentials.ts new file mode 100644 index 0000000000000..0828a3e204d61 --- /dev/null +++ b/packages/@n8n/nodes-langchain/credentials/AzureOpenAiApi.credentials.ts @@ -0,0 +1,43 @@ +import type { IAuthenticateGeneric, ICredentialType, INodeProperties } from 'n8n-workflow'; + +export class AzureOpenAiApi implements ICredentialType { + name = 'azureOpenAiApi'; + + displayName = 'Azure Open AI'; + + documentationUrl = 'azureopenai'; + + properties: INodeProperties[] = [ + { + displayName: 'API Key', + name: 'apiKey', + type: 'string', + typeOptions: { password: true }, + required: true, + default: '', + }, + { + displayName: 'Resource Name', + name: 'resourceName', + type: 'string', + required: true, + default: '', + }, + { + displayName: 'API Version', + name: 'apiVersion', + type: 'string', + required: true, + default: '2023-05-15', + }, + ]; + + authenticate: IAuthenticateGeneric = { + type: 'generic', + properties: { + headers: { + 'api-key': '={{$credentials.apiKey}}', + }, + }, + }; +} diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts index dd0ff5c07b328..e371c862ef23f 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts +++ 
b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts @@ -6,24 +6,21 @@ import { } from 'n8n-workflow'; import { initializeAgentExecutorWithOptions } from 'langchain/agents'; -import { BaseChatModel } from 'langchain/chat_models/base'; import type { Tool } from 'langchain/tools'; import type { BaseChatMemory } from 'langchain/memory'; import type { BaseOutputParser } from 'langchain/schema/output_parser'; import { PromptTemplate } from 'langchain/prompts'; import { CombiningOutputParser } from 'langchain/output_parsers'; +import { isChatInstance } from '../../../../../utils/helpers'; export async function conversationalAgentExecute( this: IExecuteFunctions, ): Promise { this.logger.verbose('Executing Conversational Agent'); - const model = (await this.getInputConnectionData( - NodeConnectionType.AiLanguageModel, - 0, - )) as BaseChatModel; + const model = await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0); - if (!(model instanceof BaseChatModel)) { + if (!isChatInstance(model)) { throw new NodeOperationError(this.getNode(), 'Conversational Agent requires Chat Model'); } diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts index 492272f5af74c..140d6444feacc 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts @@ -11,7 +11,8 @@ import type { Tool } from 'langchain/tools'; import type { BaseOutputParser } from 'langchain/schema/output_parser'; import { PromptTemplate } from 'langchain/prompts'; import { CombiningOutputParser } from 'langchain/output_parsers'; -import { BaseChatModel } from 'langchain/chat_models/base'; +import type { BaseChatModel } from 'langchain/chat_models/base'; +import { isChatInstance } from '../../../../../utils/helpers'; export async function reActAgentAgentExecute( this: IExecuteFunctions, @@ -38,7 +39,7 @@ export async function reActAgentAgentExecute( }; let agent: ChatAgent | ZeroShotAgent; - if (model instanceof BaseChatModel) { + if (isChatInstance(model)) { agent = ChatAgent.fromLLMAndTools(model, tools, { prefix: options.prefix, suffix: options.suffixChat, diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts index b52c0d1634ce7..edeb2e01a6dc0 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts @@ -9,7 +9,7 @@ import { SqlDatabase } from 'langchain/sql_db'; import type { SqlCreatePromptArgs } from 'langchain/agents/toolkits/sql'; import { SqlToolkit, createSqlAgent } from 'langchain/agents/toolkits/sql'; import type { BaseLanguageModel } from 'langchain/dist/base_language'; -import type { DataSource } from 'typeorm'; +import type { DataSource } from '@n8n/typeorm'; import { getSqliteDataSource } from './other/handlers/sqlite'; import { getPostgresDataSource } from './other/handlers/postgres'; diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/mysql.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/mysql.ts index f04c84424867e..ea1b360f046fe 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/mysql.ts +++ 
b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/mysql.ts @@ -1,5 +1,5 @@ import { type IExecuteFunctions } from 'n8n-workflow'; -import { DataSource } from 'typeorm'; +import { DataSource } from '@n8n/typeorm'; export async function getMysqlDataSource(this: IExecuteFunctions): Promise { const credentials = await this.getCredentials('mySql'); diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/postgres.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/postgres.ts index 9688ed6e65701..6971d9119f27a 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/postgres.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/postgres.ts @@ -1,5 +1,5 @@ import { type IExecuteFunctions } from 'n8n-workflow'; -import { DataSource } from 'typeorm'; +import { DataSource } from '@n8n/typeorm'; export async function getPostgresDataSource(this: IExecuteFunctions): Promise { const credentials = await this.getCredentials('postgres'); diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/sqlite.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/sqlite.ts index a80027ea9f95c..27950390a3566 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/sqlite.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/other/handlers/sqlite.ts @@ -3,7 +3,7 @@ import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; import { BINARY_ENCODING, NodeOperationError } from 'n8n-workflow'; import * as temp from 'temp'; import * as sqlite3 from 'sqlite3'; -import { DataSource } from 'typeorm'; +import { DataSource } from '@n8n/typeorm'; export function getSqliteDataSource( this: IExecuteFunctions, diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts index d1a5363636715..1fd27972b143b 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts @@ -19,9 +19,10 @@ import { import type { BaseOutputParser } from 'langchain/schema/output_parser'; import { CombiningOutputParser } from 'langchain/output_parsers'; import { LLMChain } from 'langchain/chains'; -import { BaseChatModel } from 'langchain/chat_models/base'; +import type { BaseChatModel } from 'langchain/chat_models/base'; import { HumanMessage } from 'langchain/schema'; import { getTemplateNoticeField } from '../../../utils/sharedFields'; +import { isChatInstance } from '../../../utils/helpers'; interface MessagesTemplate { type: string; @@ -94,7 +95,7 @@ async function getChainPromptTemplate( partialVariables: formatInstructions ? { formatInstructions } : undefined, }); - if (llm instanceof BaseChatModel) { + if (isChatInstance(llm)) { const parsedMessages = await Promise.all( (messages ?? 
[]).map(async (message) => { const messageClass = [ diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.ts new file mode 100644 index 0000000000000..e4503665a6c0b --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.ts @@ -0,0 +1,126 @@ +/* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { + NodeConnectionType, + type IExecuteFunctions, + type INodeType, + type INodeTypeDescription, + type SupplyData, +} from 'n8n-workflow'; + +import { OpenAIEmbeddings } from 'langchain/embeddings/openai'; +import { logWrapper } from '../../../utils/logWrapper'; +import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +export class EmbeddingsAzureOpenAi implements INodeType { + description: INodeTypeDescription = { + displayName: 'Embeddings Azure OpenAI', + name: 'embeddingsAzureOpenAi', + icon: 'file:azure.svg', + credentials: [ + { + name: 'azureOpenAiApi', + required: true, + }, + ], + group: ['transform'], + version: 1, + description: 'Use Embeddings Azure OpenAI', + defaults: { + name: 'Embeddings Azure OpenAI', + }, + + codex: { + categories: ['AI'], + subcategories: { + AI: ['Embeddings'], + }, + resources: { + primaryDocumentation: [ + { + url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsazureopenai/', + }, + ], + }, + }, + // eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node + inputs: [], + // eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong + outputs: [NodeConnectionType.AiEmbedding], + outputNames: ['Embeddings'], + properties: [ + getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]), + { + displayName: 'Model (Deployment) Name', + name: 'model', + type: 'string', + description: 'The name of the model(deployment) to use', + default: '', + }, + { + displayName: 'Options', + name: 'options', + placeholder: 'Add Option', + description: 'Additional options to add', + type: 'collection', + default: {}, + options: [ + { + displayName: 'Batch Size', + name: 'batchSize', + default: 512, + typeOptions: { maxValue: 2048 }, + description: 'Maximum number of documents to send in each request', + type: 'number', + }, + { + displayName: 'Strip New Lines', + name: 'stripNewLines', + default: true, + description: 'Whether to strip new lines from the input text', + type: 'boolean', + }, + { + displayName: 'Timeout', + name: 'timeout', + default: -1, + description: + 'Maximum amount of time a request is allowed to take in seconds. 
Set to -1 for no timeout.', + type: 'number', + }, + ], + }, + ], + }; + + async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + this.logger.verbose('Supply data for embeddings'); + const credentials = (await this.getCredentials('azureOpenAiApi')) as { + apiKey: string; + resourceName: string; + apiVersion: string; + }; + const modelName = this.getNodeParameter('model', itemIndex) as string; + + const options = this.getNodeParameter('options', itemIndex, {}) as { + batchSize?: number; + stripNewLines?: boolean; + timeout?: number; + }; + + if (options.timeout === -1) { + options.timeout = undefined; + } + + const embeddings = new OpenAIEmbeddings({ + azureOpenAIApiDeploymentName: modelName, + azureOpenAIApiInstanceName: credentials.resourceName, + azureOpenAIApiKey: credentials.apiKey, + azureOpenAIApiVersion: credentials.apiVersion, + ...options, + }); + + return { + response: logWrapper(embeddings, this), + }; + } +} diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/azure.svg b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/azure.svg new file mode 100644 index 0000000000000..bbbc6c33b3bcd --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/azure.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts index edc4a48679557..eddacc2f1e3fa 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts @@ -103,6 +103,11 @@ export class EmbeddingsOpenAi implements INodeType { // eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong outputs: [NodeConnectionType.AiEmbedding], outputNames: ['Embeddings'], + requestDefaults: { + ignoreHttpStatusErrors: true, + baseURL: + '={{ $parameter.options?.baseURL?.split("/").slice(0,-1).join("/") || "https://api.openai.com" }}', + }, properties: [ getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]), { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts new file mode 100644 index 0000000000000..764b92312136e --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts @@ -0,0 +1,169 @@ +/* eslint-disable n8n-nodes-base/node-dirname-against-convention */ +import { + NodeConnectionType, + type IExecuteFunctions, + type INodeType, + type INodeTypeDescription, + type SupplyData, +} from 'n8n-workflow'; + +import type { ClientOptions } from 'openai'; +import { ChatOpenAI } from 'langchain/chat_models/openai'; +import { logWrapper } from '../../../utils/logWrapper'; +import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; + +export class LmChatAzureOpenAi implements INodeType { + description: INodeTypeDescription = { + displayName: 'Azure OpenAI Chat Model', + // eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased + name: 'lmChatAzureOpenAi', + icon: 'file:azure.svg', + group: ['transform'], + version: 1, + description: 'For advanced usage with an AI chain', + defaults: { + name: 'Azure OpenAI Chat Model', + }, + codex: { + categories: ['AI'], + subcategories: { + AI: ['Language Models'], + 
}, + resources: { + primaryDocumentation: [ + { + url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatazureopenai/', + }, + ], + }, + }, + // eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node + inputs: [], + // eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong + outputs: [NodeConnectionType.AiLanguageModel], + outputNames: ['Model'], + credentials: [ + { + name: 'azureOpenAiApi', + required: true, + }, + ], + properties: [ + getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]), + { + displayName: 'Model (Deployment) Name', + name: 'model', + type: 'string', + description: 'The name of the model(deployment) to use', + default: '', + }, + { + displayName: 'Options', + name: 'options', + placeholder: 'Add Option', + description: 'Additional options to add', + type: 'collection', + default: {}, + options: [ + { + displayName: 'Frequency Penalty', + name: 'frequencyPenalty', + default: 0, + typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 }, + description: + "Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim", + type: 'number', + }, + { + displayName: 'Maximum Number of Tokens', + name: 'maxTokens', + default: -1, + description: + 'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).', + type: 'number', + typeOptions: { + maxValue: 32768, + }, + }, + { + displayName: 'Presence Penalty', + name: 'presencePenalty', + default: 0, + typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 }, + description: + "Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics", + type: 'number', + }, + { + displayName: 'Sampling Temperature', + name: 'temperature', + default: 0.7, + typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 }, + description: + 'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.', + type: 'number', + }, + { + displayName: 'Timeout', + name: 'timeout', + default: 60000, + description: 'Maximum amount of time a request is allowed to take in milliseconds', + type: 'number', + }, + { + displayName: 'Max Retries', + name: 'maxRetries', + default: 2, + description: 'Maximum number of retries to attempt', + type: 'number', + }, + { + displayName: 'Top P', + name: 'topP', + default: 1, + typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 }, + description: + 'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. 
We generally recommend altering this or temperature but not both.', + type: 'number', + }, + ], + }, + ], + }; + + async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + const credentials = (await this.getCredentials('azureOpenAiApi')) as { + apiKey: string; + resourceName: string; + apiVersion: string; + }; + + const modelName = this.getNodeParameter('model', itemIndex) as string; + const options = this.getNodeParameter('options', itemIndex, {}) as { + frequencyPenalty?: number; + maxTokens?: number; + maxRetries: number; + timeout: number; + presencePenalty?: number; + temperature?: number; + topP?: number; + }; + + const configuration: ClientOptions = {}; + + const model = new ChatOpenAI({ + azureOpenAIApiDeploymentName: modelName, + azureOpenAIApiInstanceName: credentials.resourceName, + azureOpenAIApiKey: credentials.apiKey, + azureOpenAIApiVersion: credentials.apiVersion, + ...options, + timeout: options.timeout ?? 60000, + maxRetries: options.maxRetries ?? 2, + configuration, + }); + + return { + response: logWrapper(model, this), + }; + } +} diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/azure.svg b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/azure.svg new file mode 100644 index 0000000000000..bbbc6c33b3bcd --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/azure.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts index 568590db197dd..6ee592ef39905 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts @@ -28,8 +28,8 @@ class N8nStructuredOutputParser extends StructuredOutput const parsed = (await super.parse(text)) as object; return ( - get(parsed, `${STRUCTURED_OUTPUT_KEY}.${STRUCTURED_OUTPUT_OBJECT_KEY}`) ?? - get(parsed, `${STRUCTURED_OUTPUT_KEY}.${STRUCTURED_OUTPUT_ARRAY_KEY}`) ?? + get(parsed, [STRUCTURED_OUTPUT_KEY, STRUCTURED_OUTPUT_OBJECT_KEY]) ?? + get(parsed, [STRUCTURED_OUTPUT_KEY, STRUCTURED_OUTPUT_ARRAY_KEY]) ?? get(parsed, STRUCTURED_OUTPUT_KEY) ?? parsed ); diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts index 945416c3c52c5..12b57b52cf71d 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts @@ -189,10 +189,9 @@ export class ToolCode implements INodeType { if (typeof response !== 'string') { // TODO: Do some more testing. Issues here should actually fail the workflow - executionError = new NodeOperationError( - this.getNode(), - `The code did not return a valid value. 
Instead of a string did a value of type '${typeof response}' get returned.`, - ); + executionError = new NodeOperationError(this.getNode(), 'Wrong output type returned', { + description: `The response property should be a string, but it is an ${typeof response}`, + }); response = `There was an error: "${executionError.message}"`; } diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts index 14e85111684f0..88c7578e16008 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts @@ -70,7 +70,7 @@ export class ToolWorkflow implements INodeType { { displayName: - 'The workflow will receive "query" as input and the output of the last node will be returned as response', + 'This tool will call the workflow you define below, and look in the last node for the response. The workflow needs to start with an Execute Workflow trigger', name: 'executeNotice', type: 'notice', default: '', @@ -87,9 +87,9 @@ export class ToolWorkflow implements INodeType { description: 'Load the workflow from the database by ID', }, { - name: 'Parameter', + name: 'Define Below', value: 'parameter', - description: 'Load the workflow from a parameter', + description: 'Pass the JSON code of a workflow', }, ], default: 'database', @@ -111,6 +111,7 @@ export class ToolWorkflow implements INodeType { default: '', required: true, description: 'The workflow to execute', + hint: 'Can be found in the URL of the workflow', }, // ---------------------------------- @@ -128,27 +129,30 @@ export class ToolWorkflow implements INodeType { source: ['parameter'], }, }, - default: '\n\n\n', + default: '\n\n\n\n\n\n\n\n\n', required: true, description: 'The workflow JSON code to execute', }, + // ---------------------------------- + // For all + // ---------------------------------- { - displayName: 'Response Property Name', + displayName: 'Field to Return', name: 'responsePropertyName', type: 'string', default: 'response', - description: 'The name of the property of the last node that will be returned as response', + required: true, + hint: 'The field in the last-executed node of the workflow that contains the response', + description: + 'Where to find the data that this tool should return. 
n8n will look in the output of the last-executed node of the workflow for a field with this name, and return its value.', }, - - // ---------------------------------- - // For all - // ---------------------------------- { - displayName: 'Workflow Values', + displayName: 'Extra Workflow Inputs', name: 'fields', placeholder: 'Add Value', type: 'fixedCollection', - description: 'Set the values which should be made available in the workflow', + description: + "These will be output by the 'execute workflow' trigger of the workflow being called", typeOptions: { multipleValues: true, sortable: true, @@ -296,6 +300,14 @@ export class ToolWorkflow implements INodeType { itemIndex, ) as string; + if (!responsePropertyName) { + throw new NodeOperationError(this.getNode(), "Field to return can't be empty", { + itemIndex, + description: + 'Enter the name of a field in the last node of the workflow that contains the response to return', + }); + } + const workflowInfo: IExecuteWorkflowInfo = {}; if (source === 'database') { // Read workflow from database @@ -399,10 +411,9 @@ export class ToolWorkflow implements INodeType { if (typeof response !== 'string') { // TODO: Do some more testing. Issues here should actually fail the workflow - executionError = new NodeOperationError( - this.getNode(), - `The code did not return a valid value. Instead of a string did a value of type '${typeof response}' get returned.`, - ); + executionError = new NodeOperationError(this.getNode(), 'Wrong output type returned', { + description: `The response property should be a string, but it is an ${typeof response}`, + }); response = `There was an error: "${executionError.message}"`; } diff --git a/packages/@n8n/nodes-langchain/package.json b/packages/@n8n/nodes-langchain/package.json index b61f0735eb0d4..e229ed4036c03 100644 --- a/packages/@n8n/nodes-langchain/package.json +++ b/packages/@n8n/nodes-langchain/package.json @@ -1,6 +1,6 @@ { "name": "@n8n/n8n-nodes-langchain", - "version": "0.11.0", + "version": "0.13.0", "description": "", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -27,6 +27,7 @@ "n8nNodesApiVersion": 1, "credentials": [ "dist/credentials/AnthropicApi.credentials.js", + "dist/credentials/AzureOpenAiApi.credentials.js", "dist/credentials/CohereApi.credentials.js", "dist/credentials/GooglePalmApi.credentials.js", "dist/credentials/HuggingFaceApi.credentials.js", @@ -53,11 +54,13 @@ "dist/nodes/document_loaders/DocumentJSONInputLoader/DocumentJsonInputLoader.node.js", "dist/nodes/embeddings/EmbeddingsCohere/EmbeddingsCohere.node.js", "dist/nodes/embeddings/EmbeddingsAwsBedrock/EmbeddingsAwsBedrock.node.js", + "dist/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.js", "dist/nodes/embeddings/EmbeddingsGooglePalm/EmbeddingsGooglePalm.node.js", "dist/nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.js", "dist/nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.js", "dist/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.js", "dist/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.js", + "dist/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.js", "dist/nodes/llms/LmGooglePalm/LmGooglePalm.node.js", "dist/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.js", "dist/nodes/llms/LmChatGooglePalm/LmChatGooglePalm.node.js", @@ -128,6 +131,7 @@ "@huggingface/inference": "2.6.4", "@langchain/core": "0.1.8", "@langchain/mistralai": "0.0.6", + "@n8n/typeorm": "0.3.20", "@n8n/vm2": "3.9.20", "@pinecone-database/pinecone": 
"1.1.2", "@qdrant/js-client-rest": "1.7.0", @@ -149,9 +153,8 @@ "pdf-parse": "1.1.1", "pg": "8.11.3", "redis": "4.6.12", - "sqlite3": "5.1.6", + "sqlite3": "5.1.7", "temp": "0.9.4", - "typeorm": "0.3.17", "zod": "3.22.4", "zod-to-json-schema": "3.22.0" } diff --git a/packages/@n8n/nodes-langchain/utils/helpers.ts b/packages/@n8n/nodes-langchain/utils/helpers.ts index 0640cfbd9118f..e0f0b32087f7d 100644 --- a/packages/@n8n/nodes-langchain/utils/helpers.ts +++ b/packages/@n8n/nodes-langchain/utils/helpers.ts @@ -1,4 +1,6 @@ import type { IExecuteFunctions } from 'n8n-workflow'; +import { BaseChatModel } from 'langchain/chat_models/base'; +import { BaseChatModel as BaseChatModelCore } from '@langchain/core/language_models/chat_models'; export function getMetadataFiltersValues( ctx: IExecuteFunctions, @@ -14,3 +16,8 @@ export function getMetadataFiltersValues( return undefined; } + +// TODO: Remove this function once langchain package is updated to 0.1.x +export function isChatInstance(model: any): model is BaseChatModel | BaseChatModelCore { + return model instanceof BaseChatModel || model instanceof BaseChatModelCore; +} diff --git a/packages/@n8n/nodes-langchain/utils/logWrapper.ts b/packages/@n8n/nodes-langchain/utils/logWrapper.ts index 52a24ef6ac327..771c3e545250b 100644 --- a/packages/@n8n/nodes-langchain/utils/logWrapper.ts +++ b/packages/@n8n/nodes-langchain/utils/logWrapper.ts @@ -27,6 +27,7 @@ import { BaseOutputParser } from 'langchain/schema/output_parser'; import { isObject } from 'lodash'; import { N8nJsonLoader } from './N8nJsonLoader'; import { N8nBinaryLoader } from './N8nBinaryLoader'; +import { isChatInstance } from './helpers'; const errorsMap: { [key: string]: { message: string; description: string } } = { 'You exceeded your current quota, please check your plan and billing details.': { @@ -225,7 +226,7 @@ export function logWrapper( } // ========== BaseChatModel ========== - if (originalInstance instanceof BaseLLM || originalInstance instanceof BaseChatModel) { + if (originalInstance instanceof BaseLLM || isChatInstance(originalInstance)) { if (prop === '_generate' && '_generate' in target) { return async ( messages: BaseMessage[] & string[], diff --git a/packages/@n8n_io/eslint-config/base.js b/packages/@n8n_io/eslint-config/base.js index 4cb51b0cbb2f0..ce8cab90078cf 100644 --- a/packages/@n8n_io/eslint-config/base.js +++ b/packages/@n8n_io/eslint-config/base.js @@ -39,6 +39,9 @@ const config = (module.exports = { /** https://github.com/sindresorhus/eslint-plugin-unicorn */ 'eslint-plugin-unicorn', + + /** https://github.com/wix-incubator/eslint-plugin-lodash */ + 'eslint-plugin-lodash', ], extends: [ @@ -458,6 +461,8 @@ const config = (module.exports = { /** https://github.com/sindresorhus/eslint-plugin-unicorn/blob/main/docs/rules/no-useless-promise-resolve-reject.md */ 'unicorn/no-useless-promise-resolve-reject': 'error', + + 'lodash/path-style': ['error', 'as-needed'], }, overrides: [ diff --git a/packages/@n8n_io/eslint-config/package.json b/packages/@n8n_io/eslint-config/package.json index 333c198156f9c..0e93a3fbd551b 100644 --- a/packages/@n8n_io/eslint-config/package.json +++ b/packages/@n8n_io/eslint-config/package.json @@ -13,6 +13,7 @@ "eslint-config-prettier": "^9.0.0", "eslint-import-resolver-typescript": "^3.6.1", "eslint-plugin-import": "^2.29.0", + "eslint-plugin-lodash": "^7.4.0", "eslint-plugin-n8n-local-rules": "^1.0.0", "eslint-plugin-prettier": "^5.0.1", "eslint-plugin-unicorn": "^49.0.0", diff --git a/packages/cli/BREAKING-CHANGES.md 
b/packages/cli/BREAKING-CHANGES.md index db2ae5269f854..6d705100953f5 100644 --- a/packages/cli/BREAKING-CHANGES.md +++ b/packages/cli/BREAKING-CHANGES.md @@ -2,6 +2,16 @@ This list shows all the versions which include breaking changes and how to upgrade. +## 1.25.0 + +### What changed? + +If the `N8N_ENCRYPTION_KEY` environment variable on a main instance does not match the `encryptionKey` in the config file, the main instance will not initialize. If the `N8N_ENCRYPTION_KEY` environment variable is missing on a worker, the worker will not initialize. + +### When is action necessary? + +If passing an `N8N_ENCRYPTION_KEY` environment variable to the main instance, make sure it matches the `encryptionKey` in the config file. If you are using workers, pass the `N8N_ENCRYPTION_KEY` environment variable to them. + ## 1.24.0 ### What changed? diff --git a/packages/cli/package.json b/packages/cli/package.json index ea619306c0369..c8707936c30af 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "n8n", - "version": "1.26.0", + "version": "1.28.0", "description": "n8n Workflow Automation Tool", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", @@ -38,7 +38,7 @@ "test:postgres": "N8N_LOG_LEVEL=silent DB_TYPE=postgresdb DB_POSTGRESDB_SCHEMA=alt_schema DB_TABLE_PREFIX=test_ jest --no-coverage", "test:mysql": "N8N_LOG_LEVEL=silent DB_TYPE=mysqldb DB_TABLE_PREFIX=test_ jest --no-coverage", "watch": "concurrently \"tsc -w -p tsconfig.build.json\" \"tsc-alias -w -p tsconfig.build.json\"", - "typeorm": "ts-node -T ../../node_modules/typeorm/cli.js" + "typeorm": "node ../../node_modules/typeorm/cli.js" }, "bin": { "n8n": "./bin/n8n" @@ -97,9 +97,10 @@ "@n8n/localtunnel": "2.1.0", "@n8n/n8n-nodes-langchain": "workspace:*", "@n8n/permissions": "workspace:*", + "@n8n/typeorm": "0.3.20", "@n8n_io/license-sdk": "2.9.1", "@oclif/core": "3.18.1", - "@rudderstack/rudder-sdk-node": "2.0.6", + "@rudderstack/rudder-sdk-node": "2.0.7", "@sentry/integrations": "7.87.0", "@sentry/node": "7.87.0", "axios": "1.6.7", @@ -146,7 +147,7 @@ "n8n-nodes-base": "workspace:*", "n8n-workflow": "workspace:*", "nanoid": "3.3.6", - "nodemailer": "6.8.0", + "nodemailer": "6.9.9", "oauth-1.0a": "2.2.6", "open": "7.4.2", "openapi-types": "10.0.0", @@ -156,27 +157,26 @@ "passport": "0.6.0", "passport-cookie": "1.0.9", "passport-jwt": "4.0.1", - "pg": "8.8.0", + "pg": "8.11.3", "picocolors": "1.0.0", "pkce-challenge": "3.0.0", "posthog-node": "3.2.1", "prom-client": "13.2.0", "psl": "1.9.0", "raw-body": "2.5.1", - "reflect-metadata": "0.1.13", + "reflect-metadata": "0.2.1", "replacestream": "4.0.3", "samlify": "2.8.9", "semver": "7.5.4", "shelljs": "0.8.5", "simple-git": "3.17.0", "source-map-support": "0.5.21", - "sqlite3": "5.1.6", + "sqlite3": "5.1.7", "sse-channel": "4.0.0", "sshpk": "1.17.0", "swagger-ui-express": "5.0.0", "syslog-client": "1.1.1", "typedi": "0.10.0", - "typeorm": "0.3.17", "uuid": "8.3.2", "validator": "13.7.0", "winston": "3.8.2", diff --git a/packages/cli/src/ActiveExecutions.ts b/packages/cli/src/ActiveExecutions.ts index f9f27f6e50a55..fdb97b6867e9c 100644 --- a/packages/cli/src/ActiveExecutions.ts +++ b/packages/cli/src/ActiveExecutions.ts @@ -1,5 +1,4 @@ import { Service } from 'typedi'; -import type { ChildProcess } from 'child_process'; import type PCancelable from 'p-cancelable'; import type { IDeferredPromise, @@ -7,7 +6,7 @@ import type { IRun, ExecutionStatus, } from 'n8n-workflow'; -import { ApplicationError, WorkflowOperationError, 
createDeferredPromise } from 'n8n-workflow'; +import { ApplicationError, createDeferredPromise, sleep } from 'n8n-workflow'; import type { ExecutionPayload, @@ -23,7 +22,7 @@ import { Logger } from '@/Logger'; @Service() export class ActiveExecutions { private activeExecutions: { - [index: string]: IExecutingWorkflowData; + [executionId: string]: IExecutingWorkflowData; } = {}; constructor( @@ -34,11 +33,7 @@ export class ActiveExecutions { /** * Add a new active execution */ - async add( - executionData: IWorkflowExecutionDataProcess, - process?: ChildProcess, - executionId?: string, - ): Promise { + async add(executionData: IWorkflowExecutionDataProcess, executionId?: string): Promise { let executionStatus: ExecutionStatus = executionId ? 'running' : 'new'; if (executionId === undefined) { // Is a new execution so save in DB @@ -82,7 +77,6 @@ export class ActiveExecutions { this.activeExecutions[executionId] = { executionData, - process, startedAt: new Date(), postExecutePromises: [], status: executionStatus, @@ -93,32 +87,17 @@ export class ActiveExecutions { /** * Attaches an execution - * */ attachWorkflowExecution(executionId: string, workflowExecution: PCancelable) { - const execution = this.activeExecutions[executionId]; - if (execution === undefined) { - throw new ApplicationError('No active execution found to attach to workflow execution to', { - extra: { executionId }, - }); - } - - execution.workflowExecution = workflowExecution; + this.getExecution(executionId).workflowExecution = workflowExecution; } attachResponsePromise( executionId: string, responsePromise: IDeferredPromise, ): void { - const execution = this.activeExecutions[executionId]; - if (execution === undefined) { - throw new ApplicationError('No active execution found to attach to workflow execution to', { - extra: { executionId }, - }); - } - - execution.responsePromise = responsePromise; + this.getExecution(executionId).responsePromise = responsePromise; } resolveResponsePromise(executionId: string, response: IExecuteResponsePromiseData): void { @@ -132,7 +111,6 @@ export class ActiveExecutions { /** * Remove an active execution - * */ remove(executionId: string, fullRunData?: IRun): void { const execution = this.activeExecutions[executionId]; @@ -141,7 +119,6 @@ export class ActiveExecutions { } // Resolve all the waiting promises - for (const promise of execution.postExecutePromises) { promise.resolve(fullRunData); } @@ -152,60 +129,31 @@ export class ActiveExecutions { /** * Forces an execution to stop - * - * @param {string} executionId The id of the execution to stop - * @param {string} timeout String 'timeout' given if stop due to timeout */ - async stopExecution(executionId: string, timeout?: string): Promise { + async stopExecution(executionId: string): Promise { const execution = this.activeExecutions[executionId]; if (execution === undefined) { // There is no execution running with that id return; } - // In case something goes wrong make sure that promise gets first - // returned that it gets then also resolved correctly. 
- if (execution.process !== undefined) { - // Workflow is running in subprocess - if (execution.process.connected) { - setTimeout(() => { - // execute on next event loop tick; - execution.process!.send({ - type: timeout || 'stopExecution', - }); - }, 1); - } - } else { - // Workflow is running in current process - execution.workflowExecution!.cancel(); - } + execution.workflowExecution!.cancel(); return await this.getPostExecutePromise(executionId); } /** - * Returns a promise which will resolve with the data of the execution - * with the given id - * - * @param {string} executionId The id of the execution to wait for + * Returns a promise which will resolve with the data of the execution with the given id */ async getPostExecutePromise(executionId: string): Promise { - const execution = this.activeExecutions[executionId]; - if (execution === undefined) { - throw new WorkflowOperationError(`There is no active execution with id "${executionId}".`); - } - // Create the promise which will be resolved when the execution finished const waitPromise = await createDeferredPromise(); - - execution.postExecutePromises.push(waitPromise); - + this.getExecution(executionId).postExecutePromises.push(waitPromise); return await waitPromise.promise(); } /** * Returns all the currently active executions - * */ getActiveExecutions(): IExecutionsCurrentSummary[] { const returnData: IExecutionsCurrentSummary[] = []; @@ -227,20 +175,42 @@ export class ActiveExecutions { return returnData; } - async setStatus(executionId: string, status: ExecutionStatus): Promise { - const execution = this.activeExecutions[executionId]; - if (execution === undefined) { - this.logger.debug( - `There is no active execution with id "${executionId}", can't update status to ${status}.`, + setStatus(executionId: string, status: ExecutionStatus) { + this.getExecution(executionId).status = status; + } + + getStatus(executionId: string): ExecutionStatus { + return this.getExecution(executionId).status; + } + + /** Wait for all active executions to finish */ + async shutdown(cancelAll = false) { + let executionIds = Object.keys(this.activeExecutions); + + if (cancelAll) { + const stopPromises = executionIds.map( + async (executionId) => await this.stopExecution(executionId), ); - return; + + await Promise.allSettled(stopPromises); } - execution.status = status; + let count = 0; + while (executionIds.length !== 0) { + if (count++ % 4 === 0) { + this.logger.info(`Waiting for ${executionIds.length} active executions to finish...`); + } + + await sleep(500); + executionIds = Object.keys(this.activeExecutions); + } } - getStatus(executionId: string): ExecutionStatus { + private getExecution(executionId: string): IExecutingWorkflowData { const execution = this.activeExecutions[executionId]; - return execution?.status ?? 'unknown'; + if (!execution) { + throw new ApplicationError('No active execution found', { extra: { executionId } }); + } + return execution; } } diff --git a/packages/cli/src/ActiveWorkflowRunner.ts b/packages/cli/src/ActiveWorkflowRunner.ts index ffeb8876d333f..d08e68f69833e 100644 --- a/packages/cli/src/ActiveWorkflowRunner.ts +++ b/packages/cli/src/ActiveWorkflowRunner.ts @@ -223,7 +223,8 @@ export class ActiveWorkflowRunner { } /** - * Clear workflow-defined webhooks from the `webhook_entity` table. + * Remove all webhooks of a workflow from the database, and + * deregister those webhooks from external services. 
*/ async clearWebhooks(workflowId: string) { const workflowData = await this.workflowRepository.findOne({ @@ -418,9 +419,10 @@ export class ActiveWorkflowRunner { } /** - * Register as active in memory all workflows stored as `active`. + * Register as active in memory all workflows stored as `active`, + * only on instance init or (in multi-main setup) on leadership change. */ - async addActiveWorkflows(activationMode: WorkflowActivateMode) { + async addActiveWorkflows(activationMode: 'init' | 'leadershipChange') { const dbWorkflows = await this.workflowRepository.getAllActive(); if (dbWorkflows.length === 0) return; @@ -433,7 +435,9 @@ export class ActiveWorkflowRunner { for (const dbWorkflow of dbWorkflows) { try { - const wasActivated = await this.add(dbWorkflow.id, activationMode, dbWorkflow); + const wasActivated = await this.add(dbWorkflow.id, activationMode, dbWorkflow, { + shouldPublish: false, + }); if (wasActivated) { this.logger.verbose(`Successfully started workflow ${dbWorkflow.display()}`, { @@ -471,15 +475,21 @@ export class ActiveWorkflowRunner { } async clearAllActivationErrors() { + this.logger.debug('Clearing all activation errors'); + await this.activationErrorsService.clearAll(); } async addAllTriggerAndPollerBasedWorkflows() { + this.logger.debug('Adding all trigger- and poller-based workflows'); + await this.addActiveWorkflows('leadershipChange'); } @OnShutdown() async removeAllTriggerAndPollerBasedWorkflows() { + this.logger.debug('Removing all trigger- and poller-based workflows'); + await this.activeWorkflows.removeAllTriggerAndPollerBasedWorkflows(); } @@ -506,35 +516,24 @@ export class ActiveWorkflowRunner { workflowId: string, activationMode: WorkflowActivateMode, existingWorkflow?: WorkflowEntity, + { shouldPublish } = { shouldPublish: true }, ) { - let workflow: Workflow; + if (this.orchestrationService.isMultiMainSetupEnabled && shouldPublish) { + await this.orchestrationService.publish('add-webhooks-triggers-and-pollers', { + workflowId, + }); - let shouldAddWebhooks = true; - let shouldAddTriggersAndPollers = true; - - /** - * In a multi-main scenario, webhooks are stored in the database, while triggers - * and pollers are run only by the leader main instance. - * - * - During a regular workflow activation (i.e. not leadership change), only the - * leader should add webhooks to prevent duplicate insertions, and only the leader - * should handle triggers and pollers to prevent duplicate work. - * - * - During a leadership change, webhooks remain in storage and so need not be added - * again, and the new leader should take over the triggers and pollers that stopped - * running when the former leader became unresponsive. - */ - if (this.orchestrationService.isMultiMainSetupEnabled) { - if (activationMode !== 'leadershipChange') { - shouldAddWebhooks = this.orchestrationService.isLeader; - shouldAddTriggersAndPollers = this.orchestrationService.isLeader; - } else { - shouldAddWebhooks = false; - shouldAddTriggersAndPollers = this.orchestrationService.isLeader; - } + return; } - const shouldActivate = shouldAddWebhooks || shouldAddTriggersAndPollers; + let workflow: Workflow; + + const shouldAddWebhooks = this.orchestrationService.shouldAddWebhooks(activationMode); + const shouldAddTriggersAndPollers = this.orchestrationService.shouldAddTriggersAndPollers(); + + const shouldDisplayActivationMessage = + (shouldAddWebhooks || shouldAddTriggersAndPollers) && + ['init', 'leadershipChange'].includes(activationMode); try { const dbWorkflow = existingWorkflow ?? 
(await this.workflowRepository.findById(workflowId)); @@ -543,7 +542,7 @@ export class ActiveWorkflowRunner { throw new WorkflowActivationError(`Failed to find workflow with ID "${workflowId}"`); } - if (shouldActivate) { + if (shouldDisplayActivationMessage) { this.logger.info(` - ${dbWorkflow.display()}`); this.logger.debug(`Initializing active workflow ${dbWorkflow.display()} (startup)`, { workflowName: dbWorkflow.name, @@ -608,7 +607,7 @@ export class ActiveWorkflowRunner { // id of them in the static data. So make sure that data gets persisted. await this.workflowStaticDataService.saveStaticData(workflow); - return shouldActivate; + return shouldDisplayActivationMessage; } /** @@ -709,7 +708,21 @@ export class ActiveWorkflowRunner { */ // TODO: this should happen in a transaction async remove(workflowId: string) { - // Remove all the webhooks of the workflow + if (this.orchestrationService.isMultiMainSetupEnabled) { + try { + await this.clearWebhooks(workflowId); + } catch (error) { + ErrorReporter.error(error); + this.logger.error( + `Could not remove webhooks of workflow "${workflowId}" because of error: "${error.message}"`, + ); + } + + await this.orchestrationService.publish('remove-triggers-and-pollers', { workflowId }); + + return; + } + try { await this.clearWebhooks(workflowId); } catch (error) { @@ -727,11 +740,21 @@ export class ActiveWorkflowRunner { // if it's active in memory then it's a trigger // so remove from list of actives workflows - if (this.activeWorkflows.isActive(workflowId)) { - const removalSuccess = await this.activeWorkflows.remove(workflowId); - if (removalSuccess) { - this.logger.verbose(`Successfully deactivated workflow "${workflowId}"`, { workflowId }); - } + await this.removeWorkflowTriggersAndPollers(workflowId); + } + + /** + * Stop running active triggers and pollers for a workflow. 
+ */ + async removeWorkflowTriggersAndPollers(workflowId: string) { + if (!this.activeWorkflows.isActive(workflowId)) return; + + const wasRemoved = await this.activeWorkflows.remove(workflowId); + + if (wasRemoved) { + this.logger.warn(`Removed triggers and pollers for workflow "${workflowId}"`, { + workflowId, + }); } } diff --git a/packages/cli/src/CredentialsHelper.ts b/packages/cli/src/CredentialsHelper.ts index 95b5a9a161fa7..ee8ffea484d60 100644 --- a/packages/cli/src/CredentialsHelper.ts +++ b/packages/cli/src/CredentialsHelper.ts @@ -2,64 +2,43 @@ /* eslint-disable @typescript-eslint/no-unsafe-member-access */ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ /* eslint-disable @typescript-eslint/no-unsafe-return */ -/* eslint-disable @typescript-eslint/no-unsafe-call */ import { Service } from 'typedi'; import { Credentials, NodeExecuteFunctions } from 'n8n-core'; -import get from 'lodash/get'; import type { ICredentialDataDecryptedObject, - ICredentialsDecrypted, ICredentialsExpressionResolveValues, - ICredentialTestFunction, - ICredentialTestRequestData, IHttpRequestOptions, INode, INodeCredentialsDetails, - INodeCredentialTestResult, - INodeExecutionData, INodeParameters, INodeProperties, INodeType, IVersionedNodeType, IRequestOptionsSimplified, - IRunExecutionData, IWorkflowDataProxyAdditionalKeys, WorkflowExecuteMode, - ITaskDataConnections, IHttpRequestHelper, INodeTypeData, INodeTypes, IWorkflowExecuteAdditionalData, - ICredentialTestFunctions, IExecuteData, } from 'n8n-workflow'; -import { - ICredentialsHelper, - VersionedNodeType, - NodeHelpers, - RoutingNode, - Workflow, - ErrorReporterProxy as ErrorReporter, - ApplicationError, -} from 'n8n-workflow'; +import { ICredentialsHelper, NodeHelpers, Workflow, ApplicationError } from 'n8n-workflow'; import type { ICredentialsDb } from '@/Interfaces'; -import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData'; -import type { User } from '@db/entities/User'; + import type { CredentialsEntity } from '@db/entities/CredentialsEntity'; import { NodeTypes } from '@/NodeTypes'; import { CredentialTypes } from '@/CredentialTypes'; import { CredentialsOverwrites } from '@/CredentialsOverwrites'; import { RESPONSE_ERROR_MESSAGES } from './constants'; -import { isObjectLiteral } from './utils'; + import { Logger } from '@/Logger'; import { CredentialsRepository } from '@db/repositories/credentials.repository'; import { SharedCredentialsRepository } from '@db/repositories/sharedCredentials.repository'; import { CredentialNotFoundError } from './errors/credential-not-found.error'; -const { OAUTH2_CREDENTIAL_TEST_SUCCEEDED, OAUTH2_CREDENTIAL_TEST_FAILED } = RESPONSE_ERROR_MESSAGES; - const mockNode = { name: '', typeVersion: 1, @@ -475,310 +454,6 @@ export class CredentialsHelper extends ICredentialsHelper { await this.credentialsRepository.update(findQuery, newCredentialsData); } - private static hasAccessToken(credentialsDecrypted: ICredentialsDecrypted) { - const oauthTokenData = credentialsDecrypted?.data?.oauthTokenData; - - if (!isObjectLiteral(oauthTokenData)) return false; - - return 'access_token' in oauthTokenData; - } - - private getCredentialTestFunction( - credentialType: string, - ): ICredentialTestFunction | ICredentialTestRequestData | undefined { - // Check if test is defined on credentials - const type = this.credentialTypes.getByName(credentialType); - if (type.test) { - return { - testRequest: type.test, - }; - } - - const supportedNodes = 
this.credentialTypes.getSupportedNodes(credentialType); - for (const nodeName of supportedNodes) { - const node = this.nodeTypes.getByName(nodeName); - - // Always set to an array even if node is not versioned to not having - // to duplicate the logic - const allNodeTypes: INodeType[] = []; - if (node instanceof VersionedNodeType) { - // Node is versioned - allNodeTypes.push(...Object.values(node.nodeVersions)); - } else { - // Node is not versioned - allNodeTypes.push(node as INodeType); - } - - // Check each of the node versions for credential tests - for (const nodeType of allNodeTypes) { - // Check each of teh credentials - for (const { name, testedBy } of nodeType.description.credentials ?? []) { - if ( - name === credentialType && - this.credentialTypes.getParentTypes(name).includes('oAuth2Api') - ) { - return async function oauth2CredTest( - this: ICredentialTestFunctions, - cred: ICredentialsDecrypted, - ): Promise { - return CredentialsHelper.hasAccessToken(cred) - ? { - status: 'OK', - message: OAUTH2_CREDENTIAL_TEST_SUCCEEDED, - } - : { - status: 'Error', - message: OAUTH2_CREDENTIAL_TEST_FAILED, - }; - }; - } - - if (name === credentialType && !!testedBy) { - if (typeof testedBy === 'string') { - if (node instanceof VersionedNodeType) { - // The node is versioned. So check all versions for test function - // starting with the latest - const versions = Object.keys(node.nodeVersions).sort().reverse(); - for (const version of versions) { - const versionedNode = node.nodeVersions[parseInt(version, 10)]; - const credentialTest = versionedNode.methods?.credentialTest; - if (credentialTest && testedBy in credentialTest) { - return credentialTest[testedBy]; - } - } - } - // Test is defined as string which links to a function - return (node as unknown as INodeType).methods?.credentialTest![testedBy]; - } - - // Test is defined as JSON with a definition for the request to make - return { - nodeType, - testRequest: testedBy, - }; - } - } - } - } - - return undefined; - } - - async testCredentials( - user: User, - credentialType: string, - credentialsDecrypted: ICredentialsDecrypted, - ): Promise { - const credentialTestFunction = this.getCredentialTestFunction(credentialType); - if (credentialTestFunction === undefined) { - return { - status: 'Error', - message: 'No testing function found for this credential.', - }; - } - - if (credentialsDecrypted.data) { - try { - const additionalData = await WorkflowExecuteAdditionalData.getBase(user.id); - credentialsDecrypted.data = this.applyDefaultsAndOverwrites( - additionalData, - credentialsDecrypted.data, - credentialType, - 'internal' as WorkflowExecuteMode, - undefined, - undefined, - user.hasGlobalScope('externalSecret:use'), - ); - } catch (error) { - this.logger.debug('Credential test failed', error); - return { - status: 'Error', - message: error.message.toString(), - }; - } - } - - if (typeof credentialTestFunction === 'function') { - // The credentials get tested via a function that is defined on the node - const credentialTestFunctions = NodeExecuteFunctions.getCredentialTestFunctions(); - - return credentialTestFunction.call(credentialTestFunctions, credentialsDecrypted); - } - - // Credentials get tested via request instructions - - // TODO: Temp workflows get created at multiple locations (for example also LoadNodeParameterOptions), - // check if some of them are identical enough that it can be combined - - let nodeType: INodeType; - if (credentialTestFunction.nodeType) { - nodeType = credentialTestFunction.nodeType; - } else { - 
nodeType = this.nodeTypes.getByNameAndVersion('n8n-nodes-base.noOp'); - } - - const node: INode = { - id: 'temp', - parameters: {}, - name: 'Temp-Node', - type: nodeType.description.name, - typeVersion: Array.isArray(nodeType.description.version) - ? nodeType.description.version.slice(-1)[0] - : nodeType.description.version, - position: [0, 0], - credentials: { - [credentialType]: { - id: credentialsDecrypted.id, - name: credentialsDecrypted.name, - }, - }, - }; - - const workflowData = { - nodes: [node], - connections: {}, - }; - - const nodeTypeCopy: INodeType = { - description: { - ...nodeType.description, - credentials: [ - { - name: credentialType, - required: true, - }, - ], - properties: [ - { - displayName: 'Temp', - name: 'temp', - type: 'string', - routing: { - request: credentialTestFunction.testRequest.request, - }, - default: '', - }, - ], - }, - }; - - mockNodesData[nodeTypeCopy.description.name] = { - sourcePath: '', - type: nodeTypeCopy, - }; - - const workflow = new Workflow({ - nodes: workflowData.nodes, - connections: workflowData.connections, - active: false, - nodeTypes: mockNodeTypes, - }); - - const mode = 'internal'; - const runIndex = 0; - const inputData: ITaskDataConnections = { - main: [[{ json: {} }]], - }; - const connectionInputData: INodeExecutionData[] = []; - const runExecutionData: IRunExecutionData = { - resultData: { - runData: {}, - }, - }; - - const additionalData = await WorkflowExecuteAdditionalData.getBase(user.id, node.parameters); - - const routingNode = new RoutingNode( - workflow, - node, - connectionInputData, - runExecutionData ?? null, - additionalData, - mode, - ); - - let response: INodeExecutionData[][] | null | undefined; - - try { - response = await routingNode.runNode( - inputData, - runIndex, - nodeTypeCopy, - { node, data: {}, source: null }, - NodeExecuteFunctions, - credentialsDecrypted, - ); - } catch (error) { - ErrorReporter.error(error); - // Do not fail any requests to allow custom error messages and - // make logic easier - if (error.cause?.response) { - const errorResponseData = { - statusCode: error.cause.response.status, - statusMessage: error.cause.response.statusText, - }; - if (credentialTestFunction.testRequest.rules) { - // Special testing rules are defined so check all in order - for (const rule of credentialTestFunction.testRequest.rules) { - if (rule.type === 'responseCode') { - if (errorResponseData.statusCode === rule.properties.value) { - return { - status: 'Error', - message: rule.properties.message, - }; - } - } - } - } - - if (errorResponseData.statusCode < 199 || errorResponseData.statusCode > 299) { - // All requests with response codes that are not 2xx are treated by default as failed - return { - status: 'Error', - message: - errorResponseData.statusMessage || - `Received HTTP status code: ${errorResponseData.statusCode}`, - }; - } - } else if (error.cause?.code) { - return { - status: 'Error', - message: error.cause.code, - }; - } - this.logger.debug('Credential test failed', error); - return { - status: 'Error', - message: error.message.toString(), - }; - } finally { - delete mockNodesData[nodeTypeCopy.description.name]; - } - - if ( - credentialTestFunction.testRequest.rules && - Array.isArray(credentialTestFunction.testRequest.rules) - ) { - // Special testing rules are defined so check all in order - for (const rule of credentialTestFunction.testRequest.rules) { - if (rule.type === 'responseSuccessBody') { - const responseData = response![0][0].json; - if (get(responseData, rule.properties.key) === 
rule.properties.value) { - return { - status: 'Error', - message: rule.properties.message, - }; - } - } - } - } - - return { - status: 'OK', - message: 'Connection successful!', - }; - } - async credentialOwnedByOwner(nodeCredential: INodeCredentialsDetails): Promise { if (!nodeCredential.id) { return false; diff --git a/packages/cli/src/Db.ts b/packages/cli/src/Db.ts index 90f0e40de3fe4..d7869d96f2c92 100644 --- a/packages/cli/src/Db.ts +++ b/packages/cli/src/Db.ts @@ -1,7 +1,11 @@ /* eslint-disable @typescript-eslint/restrict-template-expressions */ import { Container } from 'typedi'; -import type { DataSourceOptions as ConnectionOptions, EntityManager, LoggerOptions } from 'typeorm'; -import { DataSource as Connection } from 'typeorm'; +import type { + DataSourceOptions as ConnectionOptions, + EntityManager, + LoggerOptions, +} from '@n8n/typeorm'; +import { DataSource as Connection } from '@n8n/typeorm'; import type { TlsOptions } from 'tls'; import { ApplicationError, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow'; diff --git a/packages/cli/src/Interfaces.ts b/packages/cli/src/Interfaces.ts index 06b4f9ae95a46..abcfa0bccb5a4 100644 --- a/packages/cli/src/Interfaces.ts +++ b/packages/cli/src/Interfaces.ts @@ -31,9 +31,6 @@ import type { WorkflowExecute } from 'n8n-core'; import type PCancelable from 'p-cancelable'; -import type { ChildProcess } from 'child_process'; - -import type { DatabaseType } from '@db/types'; import type { AuthProviderType } from '@db/entities/AuthIdentity'; import type { SharedCredentials } from '@db/entities/SharedCredentials'; import type { TagEntity } from '@db/entities/TagEntity'; @@ -192,7 +189,6 @@ export interface IExecutionsCurrentSummary { export interface IExecutingWorkflowData { executionData: IWorkflowExecutionDataProcess; - process?: ChildProcess; startedAt: Date; postExecutePromises: Array>; responsePromise?: IDeferredPromise; @@ -270,37 +266,6 @@ export interface IWebhookManager { executeWebhook(req: WebhookRequest, res: Response): Promise; } -export interface IDiagnosticInfo { - versionCli: string; - databaseType: DatabaseType; - notificationsEnabled: boolean; - disableProductionWebhooksOnMainProcess: boolean; - systemInfo: { - os: { - type?: string; - version?: string; - }; - memory?: number; - cpus: { - count?: number; - model?: string; - speed?: number; - }; - }; - executionVariables: { - [key: string]: string | number | boolean | undefined; - }; - deploymentType: string; - binaryDataMode: string; - smtp_set_up: boolean; - ldap_allowed: boolean; - saml_enabled: boolean; - binary_data_s3: boolean; - multi_main_setup_enabled: boolean; - licensePlanName?: string; - licenseTenantId?: number; -} - export interface ITelemetryUserDeletionData { user_id: string; target_user_old_status: 'active' | 'invited'; @@ -558,11 +523,6 @@ export interface IWorkflowErrorData { }; } -export interface IProcessMessageDataHook { - hook: string; - parameters: any[]; -} - export interface IWorkflowExecutionDataProcess { destinationNode?: string; restartExecutionId?: string; @@ -577,11 +537,6 @@ export interface IWorkflowExecutionDataProcess { userId: string; } -export interface IWorkflowExecutionDataProcessWithExecution extends IWorkflowExecutionDataProcess { - executionId: string; - userId: string; -} - export interface IWorkflowExecuteProcess { startedAt: Date; workflow: Workflow; diff --git a/packages/cli/src/InternalHooks.ts b/packages/cli/src/InternalHooks.ts index 33f0d16e23dcb..e5bc7c21e295f 100644 --- a/packages/cli/src/InternalHooks.ts +++ 
b/packages/cli/src/InternalHooks.ts @@ -1,5 +1,6 @@ import { Service } from 'typedi'; import { snakeCase } from 'change-case'; +import os from 'node:os'; import { get as pslGet } from 'psl'; import type { AuthenticationMethod, @@ -13,20 +14,23 @@ import type { import { TelemetryHelpers } from 'n8n-workflow'; import { InstanceSettings } from 'n8n-core'; +import config from '@/config'; import { N8N_VERSION } from '@/constants'; import type { AuthProviderType } from '@db/entities/AuthIdentity'; import type { GlobalRole, User } from '@db/entities/User'; import type { ExecutionMetadata } from '@db/entities/ExecutionMetadata'; import { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.repository'; -import { MessageEventBus, type EventPayloadWorkflow } from '@/eventbus'; +import { WorkflowRepository } from '@db/repositories/workflow.repository'; +import type { EventPayloadWorkflow } from '@/eventbus'; +import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; import { determineFinalExecutionStatus } from '@/executionLifecycleHooks/shared/sharedHookFunctions'; import type { - IDiagnosticInfo, ITelemetryUserDeletionData, IWorkflowDb, IExecutionTrackProperties, IWorkflowExecutionDataProcess, } from '@/Interfaces'; +import { License } from '@/License'; import { EventsService } from '@/services/events.service'; import { NodeTypes } from '@/NodeTypes'; import { Telemetry } from '@/telemetry'; @@ -50,12 +54,14 @@ function userToPayload(user: User): { @Service() export class InternalHooks { constructor( - private telemetry: Telemetry, - private nodeTypes: NodeTypes, - private sharedWorkflowRepository: SharedWorkflowRepository, + private readonly telemetry: Telemetry, + private readonly nodeTypes: NodeTypes, + private readonly sharedWorkflowRepository: SharedWorkflowRepository, + private readonly workflowRepository: WorkflowRepository, eventsService: EventsService, private readonly instanceSettings: InstanceSettings, private readonly eventBus: MessageEventBus, + private readonly license: License, ) { eventsService.on( 'telemetry.onFirstProductionWorkflowSuccess', @@ -71,31 +77,69 @@ export class InternalHooks { await this.telemetry.init(); } - async onServerStarted( - diagnosticInfo: IDiagnosticInfo, - earliestWorkflowCreatedAt?: Date, - ): Promise { + async onServerStarted(): Promise { + const cpus = os.cpus(); + const binaryDataConfig = config.getEnv('binaryDataManager'); + + const isS3Selected = config.getEnv('binaryDataManager.mode') === 's3'; + const isS3Available = config.getEnv('binaryDataManager.availableModes').includes('s3'); + const isS3Licensed = this.license.isBinaryDataS3Licensed(); + const authenticationMethod = config.getEnv('userManagement.authenticationMethod'); + const info = { - version_cli: diagnosticInfo.versionCli, - db_type: diagnosticInfo.databaseType, - n8n_version_notifications_enabled: diagnosticInfo.notificationsEnabled, - n8n_disable_production_main_process: diagnosticInfo.disableProductionWebhooksOnMainProcess, - system_info: diagnosticInfo.systemInfo, - execution_variables: diagnosticInfo.executionVariables, - n8n_deployment_type: diagnosticInfo.deploymentType, - n8n_binary_data_mode: diagnosticInfo.binaryDataMode, - smtp_set_up: diagnosticInfo.smtp_set_up, - ldap_allowed: diagnosticInfo.ldap_allowed, - saml_enabled: diagnosticInfo.saml_enabled, - license_plan_name: diagnosticInfo.licensePlanName, - license_tenant_id: diagnosticInfo.licenseTenantId, + version_cli: N8N_VERSION, + db_type: config.getEnv('database.type'), + 
n8n_version_notifications_enabled: config.getEnv('versionNotifications.enabled'), + n8n_disable_production_main_process: config.getEnv( + 'endpoints.disableProductionWebhooksOnMainProcess', + ), + system_info: { + os: { + type: os.type(), + version: os.version(), + }, + memory: os.totalmem() / 1024, + cpus: { + count: cpus.length, + model: cpus[0].model, + speed: cpus[0].speed, + }, + }, + execution_variables: { + executions_mode: config.getEnv('executions.mode'), + executions_timeout: config.getEnv('executions.timeout'), + executions_timeout_max: config.getEnv('executions.maxTimeout'), + executions_data_save_on_error: config.getEnv('executions.saveDataOnError'), + executions_data_save_on_success: config.getEnv('executions.saveDataOnSuccess'), + executions_data_save_on_progress: config.getEnv('executions.saveExecutionProgress'), + executions_data_save_manual_executions: config.getEnv( + 'executions.saveDataManualExecutions', + ), + executions_data_prune: config.getEnv('executions.pruneData'), + executions_data_max_age: config.getEnv('executions.pruneDataMaxAge'), + }, + n8n_deployment_type: config.getEnv('deployment.type'), + n8n_binary_data_mode: binaryDataConfig.mode, + smtp_set_up: config.getEnv('userManagement.emails.mode') === 'smtp', + ldap_allowed: authenticationMethod === 'ldap', + saml_enabled: authenticationMethod === 'saml', + license_plan_name: this.license.getPlanName(), + license_tenant_id: config.getEnv('license.tenantId'), + binary_data_s3: isS3Available && isS3Selected && isS3Licensed, + multi_main_setup_enabled: config.getEnv('multiMainSetup.enabled'), }; + const firstWorkflow = await this.workflowRepository.findOne({ + select: ['createdAt'], + order: { createdAt: 'ASC' }, + where: {}, + }); + return await Promise.all([ this.telemetry.identify(info), this.telemetry.track('Instance started', { ...info, - earliest_workflow_created: earliestWorkflowCreatedAt, + earliest_workflow_created: firstWorkflow?.createdAt, }), ]); } @@ -301,6 +345,11 @@ export class InternalHooks { return; } + if (runData?.status === 'waiting') { + // No need to send telemetry or logs when the workflow hasn't finished yet. 
+ return; + } + const promises = []; const telemetryProperties: IExecutionTrackProperties = { diff --git a/packages/cli/src/Ldap/ldap.service.ts b/packages/cli/src/Ldap/ldap.service.ts index c7eda15e3346a..0d7f45e58dfeb 100644 --- a/packages/cli/src/Ldap/ldap.service.ts +++ b/packages/cli/src/Ldap/ldap.service.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { QueryFailedError } from 'typeorm'; +import { QueryFailedError } from '@n8n/typeorm'; import type { Entry as LdapUser, ClientOptions } from 'ldapts'; import { Client } from 'ldapts'; import type { ConnectionOptions } from 'tls'; diff --git a/packages/cli/src/License.ts b/packages/cli/src/License.ts index 9400a0043ddfb..4990e248f7bd1 100644 --- a/packages/cli/src/License.ts +++ b/packages/cli/src/License.ts @@ -265,7 +265,7 @@ export class License { } isWorkerViewLicensed() { - return this.isFeatureEnabled(LICENSE_FEATURES.WORKER_VIEW); + return true; } getCurrentEntitlements() { diff --git a/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts b/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts index bc1dcdc36f940..e6435efd88ca7 100644 --- a/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts +++ b/packages/cli/src/PublicApi/v1/handlers/workflows/workflows.handler.ts @@ -1,7 +1,7 @@ import type express from 'express'; import { Container } from 'typedi'; -import type { FindOptionsWhere } from 'typeorm'; -import { In } from 'typeorm'; +import type { FindOptionsWhere } from '@n8n/typeorm'; +import { In } from '@n8n/typeorm'; import { v4 as uuid } from 'uuid'; import { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; diff --git a/packages/cli/src/Server.ts b/packages/cli/src/Server.ts index ea01467a49c52..ecea2e5e9a392 100644 --- a/packages/cli/src/Server.ts +++ b/packages/cli/src/Server.ts @@ -7,7 +7,6 @@ import { Container, Service } from 'typedi'; import assert from 'assert'; import { exec as callbackExec } from 'child_process'; import { access as fsAccess } from 'fs/promises'; -import os from 'os'; import { join as pathJoin } from 'path'; import { promisify } from 'util'; import cookieParser from 'cookie-parser'; @@ -35,7 +34,7 @@ import { N8N_VERSION, TEMPLATES_DIR, } from '@/constants'; -import { credentialsController } from '@/credentials/credentials.controller'; +import { CredentialsController } from '@/credentials/credentials.controller'; import type { CurlHelper } from '@/requests'; import { registerController } from '@/decorators'; import { AuthController } from '@/controllers/auth.controller'; @@ -55,7 +54,7 @@ import { WorkflowStatisticsController } from '@/controllers/workflowStatistics.c import { ExternalSecretsController } from '@/ExternalSecrets/ExternalSecrets.controller.ee'; import { ExecutionsController } from '@/executions/executions.controller'; import { isApiEnabled, loadPublicApiVersions } from '@/PublicApi'; -import type { ICredentialsOverwrite, IDiagnosticInfo } from '@/Interfaces'; +import type { ICredentialsOverwrite } from '@/Interfaces'; import { CredentialsOverwrites } from '@/CredentialsOverwrites'; import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials'; import * as ResponseHelper from '@/ResponseHelper'; @@ -68,21 +67,14 @@ import { setupAuthMiddlewares } from './middlewares'; import { isLdapEnabled } from './Ldap/helpers'; import { AbstractServer } from './AbstractServer'; import { PostHogClient } from './posthog'; -import { MessageEventBus } from '@/eventbus'; +import { MessageEventBus } from 
'@/eventbus/MessageEventBus/MessageEventBus'; import { InternalHooks } from './InternalHooks'; -import { License } from './License'; import { SamlController } from './sso/saml/routes/saml.controller.ee'; import { SamlService } from './sso/saml/saml.service.ee'; import { VariablesController } from './environments/variables/variables.controller.ee'; -import { - isLdapCurrentAuthenticationMethod, - isSamlCurrentAuthenticationMethod, -} from './sso/ssoHelpers'; import { SourceControlService } from '@/environments/sourceControl/sourceControl.service.ee'; import { SourceControlController } from '@/environments/sourceControl/sourceControl.controller.ee'; -import { WorkflowRepository } from '@db/repositories/workflow.repository'; - import { handleMfaDisable, isMfaFeatureEnabled } from './Mfa/helpers'; import type { FrontendService } from './services/frontend.service'; import { ActiveWorkflowsController } from './controllers/activeWorkflows.controller'; @@ -130,72 +122,11 @@ export class Server extends AbstractServer { await super.start(); this.logger.debug(`Server ID: ${this.uniqueInstanceId}`); - const cpus = os.cpus(); - const binaryDataConfig = config.getEnv('binaryDataManager'); - - const isS3Selected = config.getEnv('binaryDataManager.mode') === 's3'; - const isS3Available = config.getEnv('binaryDataManager.availableModes').includes('s3'); - const isS3Licensed = Container.get(License).isBinaryDataS3Licensed(); - - const diagnosticInfo: IDiagnosticInfo = { - databaseType: config.getEnv('database.type'), - disableProductionWebhooksOnMainProcess: config.getEnv( - 'endpoints.disableProductionWebhooksOnMainProcess', - ), - notificationsEnabled: config.getEnv('versionNotifications.enabled'), - versionCli: N8N_VERSION, - systemInfo: { - os: { - type: os.type(), - version: os.version(), - }, - memory: os.totalmem() / 1024, - cpus: { - count: cpus.length, - model: cpus[0].model, - speed: cpus[0].speed, - }, - }, - executionVariables: { - executions_process: config.getEnv('executions.process'), - executions_mode: config.getEnv('executions.mode'), - executions_timeout: config.getEnv('executions.timeout'), - executions_timeout_max: config.getEnv('executions.maxTimeout'), - executions_data_save_on_error: config.getEnv('executions.saveDataOnError'), - executions_data_save_on_success: config.getEnv('executions.saveDataOnSuccess'), - executions_data_save_on_progress: config.getEnv('executions.saveExecutionProgress'), - executions_data_save_manual_executions: config.getEnv( - 'executions.saveDataManualExecutions', - ), - executions_data_prune: config.getEnv('executions.pruneData'), - executions_data_max_age: config.getEnv('executions.pruneDataMaxAge'), - }, - deploymentType: config.getEnv('deployment.type'), - binaryDataMode: binaryDataConfig.mode, - smtp_set_up: config.getEnv('userManagement.emails.mode') === 'smtp', - ldap_allowed: isLdapCurrentAuthenticationMethod(), - saml_enabled: isSamlCurrentAuthenticationMethod(), - binary_data_s3: isS3Available && isS3Selected && isS3Licensed, - multi_main_setup_enabled: config.getEnv('multiMainSetup.enabled'), - licensePlanName: Container.get(License).getPlanName(), - licenseTenantId: config.getEnv('license.tenantId'), - }; - if (inDevelopment && process.env.N8N_DEV_RELOAD === 'true') { void this.loadNodesAndCredentials.setupHotReload(); } - void Container.get(WorkflowRepository) - .findOne({ - select: ['createdAt'], - order: { createdAt: 'ASC' }, - where: {}, - }) - .then( - async (workflow) => - await Container.get(InternalHooks).onServerStarted(diagnosticInfo, 
workflow?.createdAt), - ); - + void Container.get(InternalHooks).onServerStarted(); Container.get(CollaborationService); } @@ -231,6 +162,7 @@ export class Server extends AbstractServer { ActiveWorkflowsController, WorkflowsController, ExecutionsController, + CredentialsController, ]; if ( @@ -349,8 +281,6 @@ export class Server extends AbstractServer { await this.registerControllers(ignoredEndpoints); - this.app.use(`/${this.restEndpoint}/credentials`, credentialsController); - // ---------------------------------------- // Workflow with versions // -------------------------------- diff --git a/packages/cli/src/TestWebhooks.ts b/packages/cli/src/TestWebhooks.ts index fef109cb924e2..b51d38deb8e19 100644 --- a/packages/cli/src/TestWebhooks.ts +++ b/packages/cli/src/TestWebhooks.ts @@ -91,10 +91,12 @@ export class TestWebhooks implements IWebhookManager { }); } - const { destinationNode, sessionId, workflowEntity } = registration; + const { destinationNode, sessionId, workflowEntity, webhook: testWebhook } = registration; const workflow = this.toWorkflow(workflowEntity); + if (testWebhook.staticData) workflow.setTestStaticData(testWebhook.staticData); + const workflowStartNode = workflow.getNode(webhook.node); if (workflowStartNode === null) { @@ -405,14 +407,7 @@ export class TestWebhooks implements IWebhookManager { connections: workflowEntity.connections, active: false, nodeTypes: this.nodeTypes, - - /** - * `staticData` in the original workflow entity has production webhook IDs. - * Since we are creating here a temporary workflow only for a test webhook, - * `staticData` from the original workflow entity should not be transferred. - */ - staticData: undefined, - + staticData: {}, settings: workflowEntity.settings, }); } diff --git a/packages/cli/src/UserManagement/PermissionChecker.ts b/packages/cli/src/UserManagement/PermissionChecker.ts index 7b6261c4d6d18..de1e60f610279 100644 --- a/packages/cli/src/UserManagement/PermissionChecker.ts +++ b/packages/cli/src/UserManagement/PermissionChecker.ts @@ -39,24 +39,20 @@ export class PermissionChecker { if (user.hasGlobalScope('workflow:execute')) return; + const isSharingEnabled = this.license.isSharingEnabled(); + // allow if all creds used in this workflow are a subset of // all creds accessible to users who have access to this workflow let workflowUserIds = [userId]; - if (workflow.id && this.license.isSharingEnabled()) { - const workflowSharings = await this.sharedWorkflowRepository.find({ - relations: ['workflow'], - where: { workflowId: workflow.id }, - select: ['userId'], - }); - workflowUserIds = workflowSharings.map((s) => s.userId); + if (workflow.id && isSharingEnabled) { + workflowUserIds = await this.sharedWorkflowRepository.getSharedUserIds(workflow.id); } - const credentialSharings = - await this.sharedCredentialsRepository.findOwnedSharings(workflowUserIds); - - const accessibleCredIds = credentialSharings.map((s) => s.credentialsId); + const accessibleCredIds = isSharingEnabled + ? 
await this.sharedCredentialsRepository.getAccessibleCredentialIds(workflowUserIds) + : await this.sharedCredentialsRepository.getOwnedCredentialIds(workflowUserIds); const inaccessibleCredIds = workflowCredIds.filter((id) => !accessibleCredIds.includes(id)); diff --git a/packages/cli/src/WebhookHelpers.ts b/packages/cli/src/WebhookHelpers.ts index 999740a19ba65..c429a06aa2f65 100644 --- a/packages/cli/src/WebhookHelpers.ts +++ b/packages/cli/src/WebhookHelpers.ts @@ -9,8 +9,7 @@ import type express from 'express'; import { Container } from 'typedi'; import get from 'lodash/get'; -import stream from 'stream'; -import { promisify } from 'util'; +import { pipeline } from 'stream/promises'; import formidable from 'formidable'; import { BinaryDataService, NodeExecuteFunctions } from 'n8n-core'; @@ -65,8 +64,6 @@ import { NotFoundError } from './errors/response-errors/not-found.error'; import { InternalServerError } from './errors/response-errors/internal-server.error'; import { UnprocessableRequestError } from './errors/response-errors/unprocessable.error'; -const pipeline = promisify(stream.pipeline); - export const WEBHOOK_METHODS: IHttpRequestMethods[] = [ 'DELETE', 'GET', diff --git a/packages/cli/src/WorkflowExecuteAdditionalData.ts b/packages/cli/src/WorkflowExecuteAdditionalData.ts index 9eef7ab40af04..cc146008f44ea 100644 --- a/packages/cli/src/WorkflowExecuteAdditionalData.ts +++ b/packages/cli/src/WorkflowExecuteAdditionalData.ts @@ -583,7 +583,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks { const workflowStatusFinal = determineFinalExecutionStatus(fullRunData); - if (workflowStatusFinal !== 'success') { + if (workflowStatusFinal !== 'success' && workflowStatusFinal !== 'waiting') { executeErrorWorkflow( this.workflowData, fullRunData, @@ -930,11 +930,7 @@ export function setExecutionStatus(status: ExecutionStatus) { return; } logger.debug(`Setting execution status for ${this.executionId} to "${status}"`); - Container.get(ActiveExecutions) - .setStatus(this.executionId, status) - .catch((error) => { - logger.debug(`Setting execution status "${status}" failed: ${error.message}`); - }); + Container.get(ActiveExecutions).setStatus(this.executionId, status); } export function sendDataToUI(type: string, data: IDataObject | IDataObject[]) { diff --git a/packages/cli/src/WorkflowRunner.ts b/packages/cli/src/WorkflowRunner.ts index ebd5a5a0cc476..6f4bc6eee6e30 100644 --- a/packages/cli/src/WorkflowRunner.ts +++ b/packages/cli/src/WorkflowRunner.ts @@ -3,7 +3,6 @@ /* eslint-disable @typescript-eslint/no-shadow */ /* eslint-disable @typescript-eslint/no-unsafe-assignment */ import { Container, Service } from 'typedi'; -import type { IProcessMessage } from 'n8n-core'; import { WorkflowExecute } from 'n8n-core'; import type { @@ -22,29 +21,20 @@ import { } from 'n8n-workflow'; import PCancelable from 'p-cancelable'; -import { join as pathJoin } from 'path'; -import { fork } from 'child_process'; import { ActiveExecutions } from '@/ActiveExecutions'; import config from '@/config'; import { ExecutionRepository } from '@db/repositories/execution.repository'; -import { MessageEventBus } from '@/eventbus'; +import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; import { ExecutionDataRecoveryService } from '@/eventbus/executionDataRecovery.service'; import { ExternalHooks } from '@/ExternalHooks'; -import type { - IExecutionResponse, - IProcessMessageDataHook, - IWorkflowExecutionDataProcess, - IWorkflowExecutionDataProcessWithExecution, -} from '@/Interfaces'; 
+import type { IExecutionResponse, IWorkflowExecutionDataProcess } from '@/Interfaces'; import { NodeTypes } from '@/NodeTypes'; import type { Job, JobData, JobResponse } from '@/Queue'; import { Queue } from '@/Queue'; -import { decodeWebhookResponse } from '@/helpers/decodeWebhookResponse'; import * as WorkflowHelpers from '@/WorkflowHelpers'; import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData'; import { generateFailedExecutionFromError } from '@/WorkflowHelpers'; -import { initErrorHandling } from '@/ErrorReporting'; import { PermissionChecker } from '@/UserManagement/PermissionChecker'; import { InternalHooks } from '@/InternalHooks'; import { Logger } from '@/Logger'; @@ -56,8 +46,6 @@ export class WorkflowRunner { private executionsMode = config.getEnv('executions.mode'); - private executionsProcess = config.getEnv('executions.process'); - constructor( private readonly logger: Logger, private readonly activeExecutions: ActiveExecutions, @@ -66,14 +54,10 @@ export class WorkflowRunner { private readonly workflowStaticDataService: WorkflowStaticDataService, private readonly nodeTypes: NodeTypes, private readonly permissionChecker: PermissionChecker, - ) {} - - /** The process did send a hook message so execute the appropriate hook */ - private async processHookMessage( - workflowHooks: WorkflowHooks, - hookData: IProcessMessageDataHook, ) { - await workflowHooks.executeHookFunctions(hookData.hook, hookData.parameters); + if (this.executionsMode === 'queue') { + this.jobQueue = Container.get(Queue); + } } /** The process did error */ @@ -169,31 +153,21 @@ export class WorkflowRunner { data: IWorkflowExecutionDataProcess, loadStaticData?: boolean, realtime?: boolean, - executionId?: string, + restartExecutionId?: string, responsePromise?: IDeferredPromise, ): Promise { - await initErrorHandling(); - - if (this.executionsMode === 'queue') { - this.jobQueue = Container.get(Queue); + // Register a new execution + const executionId = await this.activeExecutions.add(data, restartExecutionId); + if (responsePromise) { + this.activeExecutions.attachResponsePromise(executionId, responsePromise); } if (this.executionsMode === 'queue' && data.executionMode !== 'manual') { // Do not run "manual" executions in bull because sending events to the // frontend would not be possible - executionId = await this.enqueueExecution( - data, - loadStaticData, - realtime, - executionId, - responsePromise, - ); + await this.enqueueExecution(executionId, data, loadStaticData, realtime); } else { - if (this.executionsProcess === 'main') { - executionId = await this.runMainProcess(data, loadStaticData, executionId, responsePromise); - } else { - executionId = await this.runSubprocess(data, loadStaticData, executionId, responsePromise); - } + await this.runMainProcess(executionId, data, loadStaticData, executionId); void Container.get(InternalHooks).onWorkflowBeforeExecute(executionId, data); } @@ -208,7 +182,7 @@ export class WorkflowRunner { postExecutePromise .then(async (executionData) => { void Container.get(InternalHooks).onWorkflowPostExecute( - executionId!, + executionId, data.workflowData, executionData, data.userId, @@ -237,11 +211,11 @@ export class WorkflowRunner { /** Run the workflow in current process */ private async runMainProcess( + executionId: string, data: IWorkflowExecutionDataProcess, loadStaticData?: boolean, restartExecutionId?: string, - responsePromise?: IDeferredPromise, - ): Promise { + ): Promise { const workflowId = data.workflowData.id; if (loadStaticData 
=== true && workflowId) { data.workflowData.staticData = @@ -280,10 +254,9 @@ export class WorkflowRunner { undefined, workflowTimeout <= 0 ? undefined : Date.now() + workflowTimeout * 1000, ); + // TODO: set this in queue mode as well additionalData.restartExecutionId = restartExecutionId; - // Register the active execution - const executionId = await this.activeExecutions.add(data, undefined, restartExecutionId); additionalData.executionId = executionId; this.logger.verbose( @@ -313,14 +286,12 @@ export class WorkflowRunner { ); await additionalData.hooks.executeHookFunctions('workflowExecuteAfter', [failedExecution]); this.activeExecutions.remove(executionId, failedExecution); - return executionId; + return; } additionalData.hooks.hookFunctions.sendResponse = [ async (response: IExecuteResponsePromiseData): Promise => { - if (responsePromise) { - responsePromise.resolve(response); - } + this.activeExecutions.resolveResponsePromise(executionId, response); }, ]; @@ -380,7 +351,7 @@ export class WorkflowRunner { if (workflowTimeout > 0) { const timeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout')) * 1000; // as seconds executionTimeout = setTimeout(() => { - void this.activeExecutions.stopExecution(executionId, 'timeout'); + void this.activeExecutions.stopExecution(executionId); }, timeout); } @@ -414,25 +385,14 @@ export class WorkflowRunner { throw error; } - - return executionId; } private async enqueueExecution( + executionId: string, data: IWorkflowExecutionDataProcess, loadStaticData?: boolean, realtime?: boolean, - restartExecutionId?: string, - responsePromise?: IDeferredPromise, - ): Promise { - // TODO: If "loadStaticData" is set to true it has to load data new on worker - - // Register the active execution - const executionId = await this.activeExecutions.add(data, undefined, restartExecutionId); - if (responsePromise) { - this.activeExecutions.attachResponsePromise(executionId, responsePromise); - } - + ): Promise { const jobData: JobData = { executionId, loadStaticData: !!loadStaticData, @@ -624,190 +584,5 @@ export class WorkflowRunner { }); this.activeExecutions.attachWorkflowExecution(executionId, workflowExecution); - return executionId; - } - - /** Run the workflow in a child-process */ - private async runSubprocess( - data: IWorkflowExecutionDataProcess, - loadStaticData?: boolean, - restartExecutionId?: string, - responsePromise?: IDeferredPromise, - ): Promise { - const workflowId = data.workflowData.id; - let startedAt = new Date(); - const subprocess = fork(pathJoin(__dirname, 'WorkflowRunnerProcess.js')); - - if (loadStaticData === true && workflowId) { - data.workflowData.staticData = - await this.workflowStaticDataService.getStaticDataById(workflowId); - } - - data.restartExecutionId = restartExecutionId; - - // Register the active execution - const executionId = await this.activeExecutions.add(data, subprocess, restartExecutionId); - - (data as unknown as IWorkflowExecutionDataProcessWithExecution).executionId = executionId; - await this.executionRepository.updateStatus(executionId, 'running'); - - const workflowHooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId); - - try { - // Send all data to subprocess it needs to run the workflow - subprocess.send({ type: 'startWorkflow', data } as IProcessMessage); - } catch (error) { - await this.processError(error, new Date(), data.executionMode, executionId, workflowHooks); - return executionId; - } - - // Start timeout for the execution - let executionTimeout: 
NodeJS.Timeout; - - const workflowSettings = data.workflowData.settings ?? {}; - let workflowTimeout = workflowSettings.executionTimeout ?? config.getEnv('executions.timeout'); // initialize with default - - const processTimeoutFunction = (timeout: number) => { - void this.activeExecutions.stopExecution(executionId, 'timeout'); - executionTimeout = setTimeout(() => subprocess.kill(), Math.max(timeout * 0.2, 5000)); // minimum 5 seconds - }; - - if (workflowTimeout > 0) { - workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout')) * 1000; // as seconds - // Start timeout already now but give process at least 5 seconds to start. - // Without it could would it be possible that the workflow executions times out before it even got started if - // the timeout time is very short as the process start time can be quite long. - executionTimeout = setTimeout( - processTimeoutFunction, - Math.max(5000, workflowTimeout), - workflowTimeout, - ); - } - - // Create a list of child spawned executions - // If after the child process exits we have - // outstanding executions, we remove them - const childExecutionIds: string[] = []; - - // Listen to data from the subprocess - subprocess.on('message', async (message: IProcessMessage) => { - this.logger.debug( - `Received child process message of type ${message.type} for execution ID ${executionId}.`, - { executionId }, - ); - if (message.type === 'start') { - // Now that the execution actually started set the timeout again so that does not time out to early. - startedAt = new Date(); - if (workflowTimeout > 0) { - clearTimeout(executionTimeout); - executionTimeout = setTimeout(processTimeoutFunction, workflowTimeout, workflowTimeout); - } - } else if (message.type === 'end') { - clearTimeout(executionTimeout); - this.activeExecutions.remove(executionId, message.data.runData); - } else if (message.type === 'sendResponse') { - if (responsePromise) { - responsePromise.resolve(decodeWebhookResponse(message.data.response)); - } - } else if (message.type === 'sendDataToUI') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - WorkflowExecuteAdditionalData.sendDataToUI.bind({ sessionId: data.sessionId })( - message.data.type, - message.data.data, - ); - } else if (message.type === 'processError') { - clearTimeout(executionTimeout); - const executionError = message.data.executionError as ExecutionError; - await this.processError( - executionError, - startedAt, - data.executionMode, - executionId, - workflowHooks, - ); - } else if (message.type === 'processHook') { - await this.processHookMessage(workflowHooks, message.data as IProcessMessageDataHook); - } else if (message.type === 'timeout') { - // Execution timed out and its process has been terminated - const timeoutError = new WorkflowOperationError('Workflow execution timed out!'); - - // No need to add hook here as the subprocess takes care of calling the hooks - await this.processError(timeoutError, startedAt, data.executionMode, executionId); - } else if (message.type === 'startExecution') { - const executionId = await this.activeExecutions.add(message.data.runData); - childExecutionIds.push(executionId); - subprocess.send({ type: 'executionId', data: { executionId } } as IProcessMessage); - } else if (message.type === 'finishExecution') { - const executionIdIndex = childExecutionIds.indexOf(message.data.executionId); - if (executionIdIndex !== -1) { - childExecutionIds.splice(executionIdIndex, 1); - } - - if (message.data.result === undefined) { - const noDataError = 
new WorkflowOperationError('Workflow finished with no result data'); - const subWorkflowHooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain( - data, - message.data.executionId, - ); - await this.processError( - noDataError, - startedAt, - data.executionMode, - message.data?.executionId, - subWorkflowHooks, - ); - } else { - this.activeExecutions.remove(message.data.executionId, message.data.result); - } - } - }); - - // Also get informed when the processes does exit especially when it did crash or timed out - subprocess.on('exit', async (code, signal) => { - if (signal === 'SIGTERM') { - this.logger.debug(`Subprocess for execution ID ${executionId} timed out.`, { executionId }); - // Execution timed out and its process has been terminated - const timeoutError = new WorkflowOperationError('Workflow execution timed out!'); - - await this.processError( - timeoutError, - startedAt, - data.executionMode, - executionId, - workflowHooks, - ); - } else if (code !== 0) { - this.logger.debug( - `Subprocess for execution ID ${executionId} finished with error code ${code}.`, - { executionId }, - ); - // Process did exit with error code, so something went wrong. - const executionError = new WorkflowOperationError( - 'Workflow execution process crashed for an unknown reason!', - ); - - await this.processError( - executionError, - startedAt, - data.executionMode, - executionId, - workflowHooks, - ); - } - - for (const executionId of childExecutionIds) { - // When the child process exits, if we still have - // pending child executions, we mark them as finished - // They will display as unknown to the user - // Instead of pending forever as executing when it - // actually isn't anymore. - - this.activeExecutions.remove(executionId); - } - - clearTimeout(executionTimeout); - }); - - return executionId; } } diff --git a/packages/cli/src/WorkflowRunnerProcess.ts b/packages/cli/src/WorkflowRunnerProcess.ts deleted file mode 100644 index a4c1e9df83397..0000000000000 --- a/packages/cli/src/WorkflowRunnerProcess.ts +++ /dev/null @@ -1,506 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unsafe-argument */ -/* eslint-disable @typescript-eslint/no-unsafe-member-access */ -/* eslint-disable @typescript-eslint/no-unsafe-assignment */ -/* eslint-disable @typescript-eslint/no-shadow */ -/* eslint-disable @typescript-eslint/no-use-before-define */ -/* eslint-disable @typescript-eslint/unbound-method */ -import 'source-map-support/register'; -import 'reflect-metadata'; -import { setDefaultResultOrder } from 'dns'; - -import { Container } from 'typedi'; -import type { IProcessMessage } from 'n8n-core'; -import { BinaryDataService, WorkflowExecute } from 'n8n-core'; - -import type { - ExecutionError, - IDataObject, - IExecuteResponsePromiseData, - IExecuteWorkflowInfo, - INode, - INodeExecutionData, - IRun, - ITaskData, - IWorkflowExecuteAdditionalData, - IWorkflowExecuteHooks, - IWorkflowSettings, - NodeOperationError, - WorkflowExecuteMode, -} from 'n8n-workflow'; -import { - ErrorReporterProxy as ErrorReporter, - Workflow, - WorkflowHooks, - WorkflowOperationError, -} from 'n8n-workflow'; -import * as Db from '@/Db'; -import { ExternalHooks } from '@/ExternalHooks'; -import type { - IWorkflowExecuteProcess, - IWorkflowExecutionDataProcessWithExecution, -} from '@/Interfaces'; -import { NodeTypes } from '@/NodeTypes'; -import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials'; -import * as WebhookHelpers from '@/WebhookHelpers'; -import * as WorkflowHelpers from '@/WorkflowHelpers'; -import * as 
WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData'; -import { Logger } from '@/Logger'; - -import config from '@/config'; -import { generateFailedExecutionFromError } from '@/WorkflowHelpers'; -import { initErrorHandling } from '@/ErrorReporting'; -import { PermissionChecker } from '@/UserManagement/PermissionChecker'; -import { License } from '@/License'; -import { InternalHooks } from '@/InternalHooks'; -import { PostHogClient } from '@/posthog'; - -if (process.env.NODEJS_PREFER_IPV4 === 'true') { - setDefaultResultOrder('ipv4first'); -} - -class WorkflowRunnerProcess { - data: IWorkflowExecutionDataProcessWithExecution | undefined; - - logger: Logger; - - startedAt = new Date(); - - workflow: Workflow | undefined; - - workflowExecute: WorkflowExecute | undefined; - - // eslint-disable-next-line @typescript-eslint/no-invalid-void-type - executionIdCallback: (executionId: string) => void | undefined; - - childExecutions: { - [key: string]: IWorkflowExecuteProcess; - } = {}; - - static async stopProcess() { - setTimeout(() => { - // Attempt a graceful shutdown, giving executions 30 seconds to finish - process.exit(0); - }, 30000); - } - - constructor() { - this.logger = Container.get(Logger); - } - - async runWorkflow(inputData: IWorkflowExecutionDataProcessWithExecution): Promise { - process.once('SIGTERM', WorkflowRunnerProcess.stopProcess); - process.once('SIGINT', WorkflowRunnerProcess.stopProcess); - - await initErrorHandling(); - - this.data = inputData; - const { userId } = inputData; - - this.logger.verbose('Initializing n8n sub-process', { - pid: process.pid, - workflowId: this.data.workflowData.id, - }); - - this.startedAt = new Date(); - - // Init db since we need to read the license. - await Db.init(); - - const nodeTypes = Container.get(NodeTypes); - await Container.get(LoadNodesAndCredentials).init(); - - // Load all external hooks - const externalHooks = Container.get(ExternalHooks); - await externalHooks.init(); - - await Container.get(PostHogClient).init(); - await Container.get(InternalHooks).init(); - - const binaryDataConfig = config.getEnv('binaryDataManager'); - await Container.get(BinaryDataService).init(binaryDataConfig); - - const license = Container.get(License); - await license.init(); - - const workflowSettings = this.data.workflowData.settings ?? {}; - - // Start timeout for the execution - let workflowTimeout = workflowSettings.executionTimeout ?? 
config.getEnv('executions.timeout'); // initialize with default - if (workflowTimeout > 0) { - workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout')); - } - - this.workflow = new Workflow({ - id: this.data.workflowData.id, - name: this.data.workflowData.name, - nodes: this.data.workflowData.nodes, - connections: this.data.workflowData.connections, - active: this.data.workflowData.active, - nodeTypes, - staticData: this.data.workflowData.staticData, - settings: this.data.workflowData.settings, - pinData: this.data.pinData, - }); - try { - await Container.get(PermissionChecker).check(this.workflow, userId); - } catch (error) { - const caughtError = error as NodeOperationError; - const failedExecutionData = generateFailedExecutionFromError( - this.data.executionMode, - caughtError, - caughtError.node, - ); - - // Force the `workflowExecuteAfter` hook to run since - // it's the one responsible for saving the execution - await this.sendHookToParentProcess('workflowExecuteAfter', [failedExecutionData]); - // Interrupt the workflow execution since we don't have all necessary creds. - return failedExecutionData; - } - const additionalData = await WorkflowExecuteAdditionalData.getBase( - userId, - undefined, - workflowTimeout <= 0 ? undefined : Date.now() + workflowTimeout * 1000, - ); - additionalData.restartExecutionId = this.data.restartExecutionId; - additionalData.hooks = this.getProcessForwardHooks(); - - additionalData.hooks.hookFunctions.sendResponse = [ - async (response: IExecuteResponsePromiseData): Promise => { - await sendToParentProcess('sendResponse', { - response: WebhookHelpers.encodeWebhookResponse(response), - }); - }, - ]; - - additionalData.executionId = inputData.executionId; - - additionalData.setExecutionStatus = WorkflowExecuteAdditionalData.setExecutionStatus.bind({ - executionId: inputData.executionId, - }); - - additionalData.sendDataToUI = async (type: string, data: IDataObject | IDataObject[]) => { - if (workflowRunner.data!.executionMode !== 'manual') { - return; - } - - try { - await sendToParentProcess('sendDataToUI', { type, data }); - } catch (error) { - ErrorReporter.error(error); - this.logger.error( - `There was a problem sending UI data to parent process: "${error.message}"`, - ); - } - }; - const executeWorkflowFunction = additionalData.executeWorkflow; - additionalData.executeWorkflow = async ( - workflowInfo: IExecuteWorkflowInfo, - additionalData: IWorkflowExecuteAdditionalData, - options: { - parentWorkflowId: string; - inputData?: INodeExecutionData[]; - parentWorkflowSettings?: IWorkflowSettings; - }, - ): Promise | IRun> => { - const workflowData = await WorkflowExecuteAdditionalData.getWorkflowData( - workflowInfo, - options.parentWorkflowId, - options.parentWorkflowSettings, - ); - const runData = await WorkflowExecuteAdditionalData.getRunData( - workflowData, - additionalData.userId, - options?.inputData, - ); - await sendToParentProcess('startExecution', { runData }); - const executionId: string = await new Promise((resolve) => { - this.executionIdCallback = (executionId: string) => { - resolve(executionId); - }; - }); - - void Container.get(InternalHooks).onWorkflowBeforeExecute(executionId || '', runData); - - let result: IRun; - try { - const executeWorkflowFunctionOutput = (await executeWorkflowFunction( - workflowInfo, - additionalData, - { - parentWorkflowId: options?.parentWorkflowId, - inputData: options?.inputData, - parentExecutionId: executionId, - loadedWorkflowData: workflowData, - loadedRunData: runData, - 
parentWorkflowSettings: options?.parentWorkflowSettings, - }, - )) as { workflowExecute: WorkflowExecute; workflow: Workflow } as IWorkflowExecuteProcess; - const { workflowExecute } = executeWorkflowFunctionOutput; - this.childExecutions[executionId] = executeWorkflowFunctionOutput; - const { workflow } = executeWorkflowFunctionOutput; - result = await workflowExecute.processRunExecutionData(workflow); - await externalHooks.run('workflow.postExecute', [result, workflowData, executionId]); - void Container.get(InternalHooks).onWorkflowPostExecute( - executionId, - workflowData, - result, - additionalData.userId, - ); - await sendToParentProcess('finishExecution', { executionId, result }); - delete this.childExecutions[executionId]; - } catch (e) { - await sendToParentProcess('finishExecution', { executionId }); - delete this.childExecutions[executionId]; - // Throw same error we had - throw e; - } - - await sendToParentProcess('finishExecution', { executionId, result }); - - const returnData = WorkflowHelpers.getDataLastExecutedNodeData(result); - - if (returnData!.error) { - const error = new Error(returnData!.error.message); - error.stack = returnData!.error.stack; - throw error; - } - - return returnData!.data!.main; - }; - - if (this.data.executionData !== undefined) { - this.workflowExecute = new WorkflowExecute( - additionalData, - this.data.executionMode, - this.data.executionData, - ); - return await this.workflowExecute.processRunExecutionData(this.workflow); - } - if ( - this.data.runData === undefined || - this.data.startNodes === undefined || - this.data.startNodes.length === 0 - ) { - // Execute all nodes - - const startNode = WorkflowHelpers.getExecutionStartNode(this.data, this.workflow); - - // Can execute without webhook so go on - this.workflowExecute = new WorkflowExecute(additionalData, this.data.executionMode); - return await this.workflowExecute.run( - this.workflow, - startNode, - this.data.destinationNode, - this.data.pinData, - ); - } - // Execute only the nodes between start and destination nodes - this.workflowExecute = new WorkflowExecute(additionalData, this.data.executionMode); - return await this.workflowExecute.runPartialWorkflow( - this.workflow, - this.data.runData, - this.data.startNodes, - this.data.destinationNode, - this.data.pinData, - ); - } - - /** - * Sends hook data to the parent process that it executes them - */ - async sendHookToParentProcess(hook: string, parameters: any[]) { - try { - await sendToParentProcess('processHook', { - hook, - parameters, - }); - } catch (error) { - ErrorReporter.error(error); - this.logger.error(`There was a problem sending hook: "${hook}"`, { parameters, error }); - } - } - - /** - * Create a wrapper for hooks which simply forwards the data to - * the parent process where they then can be executed with access - * to database and to PushService - * - */ - getProcessForwardHooks(): WorkflowHooks { - const hookFunctions: IWorkflowExecuteHooks = { - nodeExecuteBefore: [ - async (nodeName: string): Promise => { - await this.sendHookToParentProcess('nodeExecuteBefore', [nodeName]); - }, - ], - nodeExecuteAfter: [ - async (nodeName: string, data: ITaskData): Promise => { - await this.sendHookToParentProcess('nodeExecuteAfter', [nodeName, data]); - }, - ], - workflowExecuteBefore: [ - async (): Promise => { - await this.sendHookToParentProcess('workflowExecuteBefore', []); - }, - ], - workflowExecuteAfter: [ - async (fullRunData: IRun, newStaticData?: IDataObject): Promise => { - await 
this.sendHookToParentProcess('workflowExecuteAfter', [fullRunData, newStaticData]); - }, - ], - nodeFetchedData: [ - async (workflowId: string, node: INode) => { - await this.sendHookToParentProcess('nodeFetchedData', [workflowId, node]); - }, - ], - }; - - const preExecuteFunctions = WorkflowExecuteAdditionalData.hookFunctionsPreExecute(); - - for (const key of Object.keys(preExecuteFunctions)) { - if (hookFunctions[key] === undefined) { - hookFunctions[key] = []; - } - hookFunctions[key]!.push.apply(hookFunctions[key], preExecuteFunctions[key]); - } - - return new WorkflowHooks( - hookFunctions, - this.data!.executionMode, - this.data!.executionId, - this.data!.workflowData, - { sessionId: this.data!.sessionId, retryOf: this.data!.retryOf as string }, - ); - } -} - -/** - * Sends data to parent process - * - * @param {string} type The type of data to send - * @param {*} data The data - */ -async function sendToParentProcess(type: string, data: any): Promise { - return await new Promise((resolve, reject) => { - process.send!( - { - type, - data, - }, - (error: Error) => { - if (error) { - return reject(error); - } - - resolve(); - }, - ); - }); -} - -const workflowRunner = new WorkflowRunnerProcess(); - -// Listen to messages from parent process which send the data of -// the workflow to process -process.on('message', async (message: IProcessMessage) => { - try { - if (message.type === 'startWorkflow') { - await sendToParentProcess('start', {}); - - const runData = await workflowRunner.runWorkflow(message.data); - - await sendToParentProcess('end', { - runData, - }); - - // Once the workflow got executed make sure the process gets killed again - process.exit(); - } else if (message.type === 'stopExecution' || message.type === 'timeout') { - // The workflow execution should be stopped - let runData: IRun; - - if (workflowRunner.workflowExecute !== undefined) { - const executionIds = Object.keys(workflowRunner.childExecutions); - - for (const executionId of executionIds) { - const childWorkflowExecute = workflowRunner.childExecutions[executionId]; - runData = childWorkflowExecute.workflowExecute.getFullRunData( - workflowRunner.childExecutions[executionId].startedAt, - ); - const timeOutError = - message.type === 'timeout' - ? new WorkflowOperationError('Workflow execution timed out!') - : new WorkflowOperationError('Workflow-Execution has been canceled!'); - - // If there is any data send it to parent process, if execution timedout add the error - - await childWorkflowExecute.workflowExecute.processSuccessExecution( - workflowRunner.childExecutions[executionId].startedAt, - childWorkflowExecute.workflow, - timeOutError, - ); - } - - // Workflow started already executing - runData = workflowRunner.workflowExecute.getFullRunData(workflowRunner.startedAt); - - const timeOutError = - message.type === 'timeout' - ? new WorkflowOperationError('Workflow execution timed out!') - : new WorkflowOperationError('Workflow-Execution has been canceled!'); - - runData.status = message.type === 'timeout' ? 'failed' : 'canceled'; - - // If there is any data send it to parent process, if execution timedout add the error - await workflowRunner.workflowExecute.processSuccessExecution( - workflowRunner.startedAt, - workflowRunner.workflow!, - timeOutError, - ); - } else { - // Workflow did not get started yet - runData = { - data: { - resultData: { - runData: {}, - }, - }, - finished: false, - mode: workflowRunner.data - ? 
workflowRunner.data.executionMode - : ('own' as WorkflowExecuteMode), - startedAt: workflowRunner.startedAt, - stoppedAt: new Date(), - status: 'canceled', - }; - - await workflowRunner.sendHookToParentProcess('workflowExecuteAfter', [runData]); - } - - await sendToParentProcess(message.type === 'timeout' ? message.type : 'end', { - runData, - }); - - // Stop process - process.exit(); - } else if (message.type === 'executionId') { - workflowRunner.executionIdCallback(message.data.executionId); - } - } catch (error) { - workflowRunner.logger.error(error.message); - - // Catch all uncaught errors and forward them to parent process - const executionError = { - ...error, - name: error.name || 'Error', - message: error.message, - stack: error.stack, - } as ExecutionError; - - await sendToParentProcess('processError', { - executionError, - }); - process.exit(); - } -}); diff --git a/packages/cli/src/commands/BaseCommand.ts b/packages/cli/src/commands/BaseCommand.ts index 61aab82702253..5cd5b0cbbfe02 100644 --- a/packages/cli/src/commands/BaseCommand.ts +++ b/packages/cli/src/commands/BaseCommand.ts @@ -44,7 +44,7 @@ export abstract class BaseCommand extends Command { /** * How long to wait for graceful shutdown before force killing the process. */ - protected gracefulShutdownTimeoutInS: number = config.getEnv('generic.gracefulShutdownTimeout'); + protected gracefulShutdownTimeoutInS = config.getEnv('generic.gracefulShutdownTimeout'); async init(): Promise { await initErrorHandling(); @@ -84,8 +84,8 @@ export abstract class BaseCommand extends Command { ); } if (process.env.EXECUTIONS_PROCESS === 'own') { - this.logger.warn( - 'Own mode has been deprecated and will be removed in a future version of n8n. If you need the isolation and performance gains, please consider using queue mode.', + throw new ApplicationError( + 'Own mode has been removed. 
If you need the isolation and performance gains, please consider using queue mode.', ); } diff --git a/packages/cli/src/commands/db/revert.ts b/packages/cli/src/commands/db/revert.ts index 967d4a8286453..c16219ea56649 100644 --- a/packages/cli/src/commands/db/revert.ts +++ b/packages/cli/src/commands/db/revert.ts @@ -1,6 +1,6 @@ import { Command, Flags } from '@oclif/core'; -import type { DataSourceOptions as ConnectionOptions } from 'typeorm'; -import { DataSource as Connection } from 'typeorm'; +import type { DataSourceOptions as ConnectionOptions } from '@n8n/typeorm'; +import { DataSource as Connection } from '@n8n/typeorm'; import { Container } from 'typedi'; import { Logger } from '@/Logger'; import { getConnectionOptions, setSchema } from '@/Db'; diff --git a/packages/cli/src/commands/executeBatch.ts b/packages/cli/src/commands/executeBatch.ts index cc50809622a86..483b4a3dc4abc 100644 --- a/packages/cli/src/commands/executeBatch.ts +++ b/packages/cli/src/commands/executeBatch.ts @@ -4,7 +4,7 @@ import { Flags } from '@oclif/core'; import fs from 'fs'; import os from 'os'; import type { IRun, ITaskData } from 'n8n-workflow'; -import { ApplicationError, jsonParse, sleep } from 'n8n-workflow'; +import { ApplicationError, jsonParse } from 'n8n-workflow'; import { sep } from 'path'; import { diff } from 'json-diff'; import pick from 'lodash/pick'; @@ -118,28 +118,9 @@ export class ExecuteBatch extends BaseCommand { } ExecuteBatch.cancelled = true; - const activeExecutionsInstance = Container.get(ActiveExecutions); - const stopPromises = activeExecutionsInstance - .getActiveExecutions() - .map(async (execution) => await activeExecutionsInstance.stopExecution(execution.id)); - await Promise.allSettled(stopPromises); + await Container.get(ActiveExecutions).shutdown(true); - setTimeout(() => process.exit(0), 30000); - - let executingWorkflows = activeExecutionsInstance.getActiveExecutions(); - - let count = 0; - while (executingWorkflows.length !== 0) { - if (count++ % 4 === 0) { - console.log(`Waiting for ${executingWorkflows.length} active executions to finish...`); - executingWorkflows.map((execution) => { - console.log(` - Execution ID ${execution.id}, workflow ID: ${execution.workflowId}`); - }); - } - await sleep(500); - executingWorkflows = activeExecutionsInstance.getActiveExecutions(); - } // We may receive true but when called from `process.on` // we get the signal (SIGINT, etc.) 
if (skipExit !== true) { diff --git a/packages/cli/src/commands/import/credentials.ts b/packages/cli/src/commands/import/credentials.ts index 95a85fb90c80b..287452d7b6119 100644 --- a/packages/cli/src/commands/import/credentials.ts +++ b/packages/cli/src/commands/import/credentials.ts @@ -3,7 +3,7 @@ import { Flags } from '@oclif/core'; import { Cipher } from 'n8n-core'; import fs from 'fs'; import glob from 'fast-glob'; -import type { EntityManager } from 'typeorm'; +import type { EntityManager } from '@n8n/typeorm'; import * as Db from '@/Db'; import type { User } from '@db/entities/User'; diff --git a/packages/cli/src/commands/start.ts b/packages/cli/src/commands/start.ts index a1ddf03ec7442..ad09299089644 100644 --- a/packages/cli/src/commands/start.ts +++ b/packages/cli/src/commands/start.ts @@ -5,18 +5,17 @@ import { Flags, type Config } from '@oclif/core'; import path from 'path'; import { mkdir } from 'fs/promises'; import { createReadStream, createWriteStream, existsSync } from 'fs'; -import stream from 'stream'; +import { pipeline } from 'stream/promises'; import replaceStream from 'replacestream'; -import { promisify } from 'util'; import glob from 'fast-glob'; -import { sleep, jsonParse } from 'n8n-workflow'; +import { jsonParse } from 'n8n-workflow'; import config from '@/config'; import { ActiveExecutions } from '@/ActiveExecutions'; import { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; import { Server } from '@/Server'; import { EDITOR_UI_DIST_DIR, LICENSE_FEATURES } from '@/constants'; -import { MessageEventBus } from '@/eventbus'; +import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; import { InternalHooks } from '@/InternalHooks'; import { License } from '@/License'; import { OrchestrationService } from '@/services/orchestration.service'; @@ -31,7 +30,6 @@ import { BaseCommand } from './BaseCommand'; // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-var-requires const open = require('open'); -const pipeline = promisify(stream.pipeline); export class Start extends BaseCommand { static description = 'Starts n8n. 
Makes Web-UI available and starts active workflows'; @@ -108,23 +106,7 @@ export class Start extends BaseCommand { await Container.get(InternalHooks).onN8nStop(); - // Wait for active workflow executions to finish - const activeExecutionsInstance = Container.get(ActiveExecutions); - let executingWorkflows = activeExecutionsInstance.getActiveExecutions(); - - let count = 0; - while (executingWorkflows.length !== 0) { - if (count++ % 4 === 0) { - console.log(`Waiting for ${executingWorkflows.length} active executions to finish...`); - - executingWorkflows.map((execution) => { - console.log(` - Execution ID ${execution.id}, workflow ID: ${execution.workflowId}`); - }); - } - - await sleep(500); - executingWorkflows = activeExecutionsInstance.getActiveExecutions(); - } + await Container.get(ActiveExecutions).shutdown(); // Finally shut down Event Bus await Container.get(MessageEventBus).close(); @@ -228,31 +210,11 @@ export class Start extends BaseCommand { if (!orchestrationService.isMultiMainSetupEnabled) return; orchestrationService.multiMainSetup - .addListener('leadershipChange', async () => { - if (orchestrationService.isLeader) { - this.logger.debug('[Leadership change] Clearing all activation errors...'); - - await this.activeWorkflowRunner.clearAllActivationErrors(); - - this.logger.debug( - '[Leadership change] Adding all trigger- and poller-based workflows...', - ); - - await this.activeWorkflowRunner.addAllTriggerAndPollerBasedWorkflows(); - } else { - this.logger.debug( - '[Leadership change] Removing all trigger- and poller-based workflows...', - ); - - await this.activeWorkflowRunner.removeAllTriggerAndPollerBasedWorkflows(); - } - }) - .addListener('leadershipVacant', async () => { - this.logger.debug( - '[Leadership vacant] Removing all trigger- and poller-based workflows...', - ); - + .on('leader-stepdown', async () => { await this.activeWorkflowRunner.removeAllTriggerAndPollerBasedWorkflows(); + }) + .on('leader-takeover', async () => { + await this.activeWorkflowRunner.addAllTriggerAndPollerBasedWorkflows(); }); } @@ -372,16 +334,8 @@ export class Start extends BaseCommand { if (!orchestrationService.isMultiMainSetupEnabled) return; orchestrationService.multiMainSetup - .addListener('leadershipChange', async () => { - if (orchestrationService.isLeader) { - this.pruningService.startPruning(); - } else { - this.pruningService.stopPruning(); - } - }) - .addListener('leadershipVacant', () => { - this.pruningService.stopPruning(); - }); + .on('leader-stepdown', () => this.pruningService.stopPruning()) + .on('leader-takeover', () => this.pruningService.startPruning()); } async catch(error: Error) { diff --git a/packages/cli/src/commands/webhook.ts b/packages/cli/src/commands/webhook.ts index e7184bdfe0a70..5b72c1eb86a9b 100644 --- a/packages/cli/src/commands/webhook.ts +++ b/packages/cli/src/commands/webhook.ts @@ -1,6 +1,6 @@ import { Container } from 'typedi'; import { Flags, type Config } from '@oclif/core'; -import { sleep } from 'n8n-workflow'; +import { ApplicationError } from 'n8n-workflow'; import config from '@/config'; import { ActiveExecutions } from '@/ActiveExecutions'; @@ -42,21 +42,7 @@ export class Webhook extends BaseCommand { try { await this.externalHooks?.run('n8n.stop', []); - // Wait for active workflow executions to finish - const activeExecutionsInstance = Container.get(ActiveExecutions); - let executingWorkflows = activeExecutionsInstance.getActiveExecutions(); - - let count = 0; - while (executingWorkflows.length !== 0) { - if (count++ % 4 === 0) { - 
this.logger.info( - `Waiting for ${executingWorkflows.length} active executions to finish...`, - ); - } - - await sleep(500); - executingWorkflows = activeExecutionsInstance.getActiveExecutions(); - } + await Container.get(ActiveExecutions).shutdown(); } catch (error) { await this.exitWithCrash('There was an error shutting down n8n.', error); } @@ -102,6 +88,12 @@ export class Webhook extends BaseCommand { } async run() { + if (config.getEnv('multiMainSetup.enabled')) { + throw new ApplicationError( + 'Webhook process cannot be started when multi-main setup is enabled.', + ); + } + await Container.get(Queue).init(); await this.server.start(); this.logger.debug(`Webhook listener ID: ${this.server.uniqueInstanceId}`); diff --git a/packages/cli/src/commands/worker.ts b/packages/cli/src/commands/worker.ts index b8a47b86e5eef..eb9c947ecdb26 100644 --- a/packages/cli/src/commands/worker.ts +++ b/packages/cli/src/commands/worker.ts @@ -29,7 +29,7 @@ import { OwnershipService } from '@/services/ownership.service'; import type { ICredentialsOverwrite } from '@/Interfaces'; import { CredentialsOverwrites } from '@/CredentialsOverwrites'; import { rawBodyReader, bodyParser } from '@/middlewares'; -import { MessageEventBus } from '@/eventbus'; +import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; import type { RedisServicePubSubSubscriber } from '@/services/redis/RedisServicePubSubSubscriber'; import { EventMessageGeneric } from '@/eventbus/EventMessageClasses/EventMessageGeneric'; import { OrchestrationHandlerWorkerService } from '@/services/orchestration/worker/orchestration.handler.worker.service'; @@ -267,9 +267,10 @@ export class Worker extends BaseCommand { } async init() { - const configuredShutdownTimeout = config.getEnv('queue.bull.gracefulShutdownTimeout'); - if (configuredShutdownTimeout) { - this.gracefulShutdownTimeoutInS = configuredShutdownTimeout; + const { QUEUE_WORKER_TIMEOUT } = process.env; + if (QUEUE_WORKER_TIMEOUT) { + this.gracefulShutdownTimeoutInS = + parseInt(QUEUE_WORKER_TIMEOUT, 10) || config.default('queue.bull.gracefulShutdownTimeout'); this.logger.warn( 'QUEUE_WORKER_TIMEOUT has been deprecated. Rename it to N8N_GRACEFUL_SHUTDOWN_TIMEOUT.', ); diff --git a/packages/cli/src/config/index.ts b/packages/cli/src/config/index.ts index 7300b0dcf739f..5cc2d2a0c1829 100644 --- a/packages/cli/src/config/index.ts +++ b/packages/cli/src/config/index.ts @@ -6,7 +6,6 @@ import { inTest, inE2ETests } from '@/constants'; if (inE2ETests) { // Skip loading config from env variables in end-to-end tests - process.env.EXECUTIONS_PROCESS = 'main'; process.env.N8N_DIAGNOSTICS_ENABLED = 'false'; process.env.N8N_PUBLIC_API_DISABLED = 'true'; process.env.EXTERNAL_FRONTEND_HOOKS_URLS = ''; diff --git a/packages/cli/src/config/schema.ts b/packages/cli/src/config/schema.ts index 7a452835c87d3..7cff2f87e0bc0 100644 --- a/packages/cli/src/config/schema.ts +++ b/packages/cli/src/config/schema.ts @@ -234,15 +234,6 @@ export const schema = { }, executions: { - // By default workflows get always executed in the main process. 
- // TODO: remove this and all usage of `executions.process` when `own` mode is deleted - process: { - doc: 'In what process workflows should be executed.', - format: ['main', 'own'] as const, - default: 'main', - env: 'EXECUTIONS_PROCESS', - }, - mode: { doc: 'If it should run executions directly or via queue', format: ['regular', 'queue'] as const, diff --git a/packages/cli/src/controllers/e2e.controller.ts b/packages/cli/src/controllers/e2e.controller.ts index 2d0b50d6c4780..1435c9064fe57 100644 --- a/packages/cli/src/controllers/e2e.controller.ts +++ b/packages/cli/src/controllers/e2e.controller.ts @@ -4,7 +4,7 @@ import config from '@/config'; import { SettingsRepository } from '@db/repositories/settings.repository'; import { UserRepository } from '@db/repositories/user.repository'; import { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; -import { MessageEventBus } from '@/eventbus'; +import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; import { License } from '@/License'; import { LICENSE_FEATURES, inE2ETests } from '@/constants'; import { NoAuthRequired, Patch, Post, RestController } from '@/decorators'; diff --git a/packages/cli/src/credentials/credentials.controller.ee.ts b/packages/cli/src/credentials/credentials.controller.ee.ts deleted file mode 100644 index 09dc2a4e75052..0000000000000 --- a/packages/cli/src/credentials/credentials.controller.ee.ts +++ /dev/null @@ -1,195 +0,0 @@ -import express from 'express'; -import type { INodeCredentialTestResult } from 'n8n-workflow'; -import { deepCopy } from 'n8n-workflow'; -import * as Db from '@/Db'; -import * as ResponseHelper from '@/ResponseHelper'; - -import type { CredentialRequest } from '@/requests'; -import { License } from '@/License'; -import { EECredentialsService as EECredentials } from './credentials.service.ee'; -import { OwnershipService } from '@/services/ownership.service'; -import { Container } from 'typedi'; -import { InternalHooks } from '@/InternalHooks'; -import { BadRequestError } from '@/errors/response-errors/bad-request.error'; -import { NotFoundError } from '@/errors/response-errors/not-found.error'; -import { UnauthorizedError } from '@/errors/response-errors/unauthorized.error'; -import { CredentialsRepository } from '@/databases/repositories/credentials.repository'; -import * as utils from '@/utils'; -import { UserManagementMailer } from '@/UserManagement/email'; - -export const EECredentialsController = express.Router(); - -EECredentialsController.use((req, res, next) => { - if (!Container.get(License).isSharingEnabled()) { - // skip ee router and use free one - next('router'); - return; - } - // use ee router - next(); -}); - -/** - * GET /credentials/:id - */ -EECredentialsController.get( - '/:id(\\w+)', - (req, res, next) => (req.params.id === 'new' ? next('router') : next()), // skip ee router and use free one for naming - ResponseHelper.send(async (req: CredentialRequest.Get) => { - const { id: credentialId } = req.params; - const includeDecryptedData = req.query.includeData === 'true'; - - let credential = await Container.get(CredentialsRepository).findOne({ - where: { id: credentialId }, - relations: ['shared', 'shared.user'], - }); - - if (!credential) { - throw new NotFoundError( - 'Could not load the credential. 
If you think this is an error, ask the owner to share it with you again', - ); - } - - const userSharing = credential.shared?.find((shared) => shared.user.id === req.user.id); - - if (!userSharing && !req.user.hasGlobalScope('credential:read')) { - throw new UnauthorizedError('Forbidden.'); - } - - credential = Container.get(OwnershipService).addOwnedByAndSharedWith(credential); - - if (!includeDecryptedData || !userSharing || userSharing.role !== 'credential:owner') { - const { data: _, ...rest } = credential; - return { ...rest }; - } - - const { data: _, ...rest } = credential; - - const decryptedData = EECredentials.redact(EECredentials.decrypt(credential), credential); - - return { data: decryptedData, ...rest }; - }), -); - -/** - * POST /credentials/test - * - * Test if a credential is valid. - */ -EECredentialsController.post( - '/test', - ResponseHelper.send(async (req: CredentialRequest.Test): Promise => { - const { credentials } = req.body; - - const credentialId = credentials.id; - const { ownsCredential } = await EECredentials.isOwned(req.user, credentialId); - - const sharing = await EECredentials.getSharing(req.user, credentialId, { - allowGlobalScope: true, - globalScope: 'credential:read', - }); - if (!ownsCredential) { - if (!sharing) { - throw new UnauthorizedError('Forbidden'); - } - - const decryptedData = EECredentials.decrypt(sharing.credentials); - Object.assign(credentials, { data: decryptedData }); - } - - const mergedCredentials = deepCopy(credentials); - if (mergedCredentials.data && sharing?.credentials) { - const decryptedData = EECredentials.decrypt(sharing.credentials); - mergedCredentials.data = EECredentials.unredact(mergedCredentials.data, decryptedData); - } - - return await EECredentials.test(req.user, mergedCredentials); - }), -); - -/** - * (EE) PUT /credentials/:id/share - * - * Grant or remove users' access to a credential. 
- */ - -EECredentialsController.put( - '/:credentialId/share', - ResponseHelper.send(async (req: CredentialRequest.Share) => { - const { credentialId } = req.params; - const { shareWithIds } = req.body; - - if ( - !Array.isArray(shareWithIds) || - !shareWithIds.every((userId) => typeof userId === 'string') - ) { - throw new BadRequestError('Bad request'); - } - - const isOwnedRes = await EECredentials.isOwned(req.user, credentialId); - const { ownsCredential } = isOwnedRes; - let { credential } = isOwnedRes; - if (!ownsCredential || !credential) { - credential = undefined; - // Allow owners/admins to share - if (req.user.hasGlobalScope('credential:share')) { - const sharedRes = await EECredentials.getSharing(req.user, credentialId, { - allowGlobalScope: true, - globalScope: 'credential:share', - }); - credential = sharedRes?.credentials; - } - if (!credential) { - throw new UnauthorizedError('Forbidden'); - } - } - - const ownerIds = ( - await EECredentials.getSharings(Db.getConnection().createEntityManager(), credentialId, [ - 'shared', - ]) - ) - .filter((e) => e.role === 'credential:owner') - .map((e) => e.userId); - - let amountRemoved: number | null = null; - let newShareeIds: string[] = []; - await Db.transaction(async (trx) => { - // remove all sharings that are not supposed to exist anymore - const { affected } = await Container.get(CredentialsRepository).pruneSharings( - trx, - credentialId, - [...ownerIds, ...shareWithIds], - ); - if (affected) amountRemoved = affected; - - const sharings = await EECredentials.getSharings(trx, credentialId); - - // extract the new sharings that need to be added - newShareeIds = utils.rightDiff( - [sharings, (sharing) => sharing.userId], - [shareWithIds, (shareeId) => shareeId], - ); - - if (newShareeIds.length) { - await EECredentials.share(trx, credential!, newShareeIds); - } - }); - - void Container.get(InternalHooks).onUserSharedCredentials({ - user: req.user, - credential_name: credential.name, - credential_type: credential.type, - credential_id: credential.id, - user_id_sharer: req.user.id, - user_ids_sharees_added: newShareeIds, - sharees_removed: amountRemoved, - }); - - await Container.get(UserManagementMailer).notifyCredentialsShared({ - sharer: req.user, - newShareeIds, - credentialsName: credential.name, - }); - }), -); diff --git a/packages/cli/src/credentials/credentials.controller.ts b/packages/cli/src/credentials/credentials.controller.ts index d7406d496f265..9d26c37f08d05 100644 --- a/packages/cli/src/credentials/credentials.controller.ts +++ b/packages/cli/src/credentials/credentials.controller.ts @@ -1,62 +1,100 @@ -import express from 'express'; -import type { INodeCredentialTestResult } from 'n8n-workflow'; import { deepCopy } from 'n8n-workflow'; - -import * as ResponseHelper from '@/ResponseHelper'; import config from '@/config'; -import { EECredentialsController } from './credentials.controller.ee'; import { CredentialsService } from './credentials.service'; - -import type { ICredentialsDb } from '@/Interfaces'; -import type { CredentialRequest, ListQuery } from '@/requests'; -import { Container } from 'typedi'; +import { CredentialRequest, ListQuery } from '@/requests'; import { InternalHooks } from '@/InternalHooks'; -import { listQueryMiddleware } from '@/middlewares'; import { Logger } from '@/Logger'; import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { UnauthorizedError } from '@/errors/response-errors/unauthorized.error'; import { NamingService } from '@/services/naming.service'; +import 
{ License } from '@/License'; +import { CredentialsRepository } from '@/databases/repositories/credentials.repository'; +import { OwnershipService } from '@/services/ownership.service'; +import { EnterpriseCredentialsService } from './credentials.service.ee'; +import { Authorized, Delete, Get, Licensed, Patch, Post, Put, RestController } from '@/decorators'; +import { BadRequestError } from '@/errors/response-errors/bad-request.error'; +import { UserManagementMailer } from '@/UserManagement/email'; +import * as Db from '@/Db'; +import * as utils from '@/utils'; +import { listQueryMiddleware } from '@/middlewares'; -export const credentialsController = express.Router(); -credentialsController.use('/', EECredentialsController); - -/** - * GET /credentials - */ -credentialsController.get( - '/', - listQueryMiddleware, - ResponseHelper.send(async (req: ListQuery.Request) => { - return await CredentialsService.getMany(req.user, { listQueryOptions: req.listQueryOptions }); - }), -); - -/** - * GET /credentials/new - * - * Generate a unique credential name. - */ -credentialsController.get( - '/new', - ResponseHelper.send(async (req: CredentialRequest.NewName) => { +@Authorized() +@RestController('/credentials') +export class CredentialsController { + constructor( + private readonly credentialsService: CredentialsService, + private readonly enterpriseCredentialsService: EnterpriseCredentialsService, + private readonly credentialsRepository: CredentialsRepository, + private readonly namingService: NamingService, + private readonly license: License, + private readonly logger: Logger, + private readonly ownershipService: OwnershipService, + private readonly internalHooks: InternalHooks, + private readonly userManagementMailer: UserManagementMailer, + ) {} + + @Get('/', { middlewares: listQueryMiddleware }) + async getMany(req: ListQuery.Request) { + return await this.credentialsService.getMany(req.user, { + listQueryOptions: req.listQueryOptions, + }); + } + + @Get('/new') + async generateUniqueName(req: CredentialRequest.NewName) { const requestedName = req.query.name ?? config.getEnv('credentials.defaultName'); return { - name: await Container.get(NamingService).getUniqueCredentialName(requestedName), + name: await this.namingService.getUniqueCredentialName(requestedName), }; - }), -); - -/** - * GET /credentials/:id - */ -credentialsController.get( - '/:id(\\w+)', - ResponseHelper.send(async (req: CredentialRequest.Get) => { + } + + @Get('/:id') + async getOne(req: CredentialRequest.Get) { + if (this.license.isSharingEnabled()) { + const { id: credentialId } = req.params; + const includeDecryptedData = req.query.includeData === 'true'; + + let credential = await this.credentialsRepository.findOne({ + where: { id: credentialId }, + relations: ['shared', 'shared.user'], + }); + + if (!credential) { + throw new NotFoundError( + 'Could not load the credential. 
If you think this is an error, ask the owner to share it with you again', + ); + } + + const userSharing = credential.shared?.find((shared) => shared.user.id === req.user.id); + + if (!userSharing && !req.user.hasGlobalScope('credential:read')) { + throw new UnauthorizedError('Forbidden.'); + } + + credential = this.ownershipService.addOwnedByAndSharedWith(credential); + + if (!includeDecryptedData || !userSharing || userSharing.role !== 'credential:owner') { + const { data: _, ...rest } = credential; + return { ...rest }; + } + + const { data: _, ...rest } = credential; + + const decryptedData = this.credentialsService.redact( + this.credentialsService.decrypt(credential), + credential, + ); + + return { data: decryptedData, ...rest }; + } + + // non-enterprise + const { id: credentialId } = req.params; const includeDecryptedData = req.query.includeData === 'true'; - const sharing = await CredentialsService.getSharing( + const sharing = await this.credentialsService.getSharing( req.user, credentialId, { allowGlobalScope: true, globalScope: 'credential:read' }, @@ -75,52 +113,79 @@ credentialsController.get( return { ...rest }; } - const decryptedData = CredentialsService.redact( - CredentialsService.decrypt(credential), + const decryptedData = this.credentialsService.redact( + this.credentialsService.decrypt(credential), credential, ); return { data: decryptedData, ...rest }; - }), -); - -/** - * POST /credentials/test - * - * Test if a credential is valid. - */ -credentialsController.post( - '/test', - ResponseHelper.send(async (req: CredentialRequest.Test): Promise => { + } + + @Post('/test') + async testCredentials(req: CredentialRequest.Test) { + if (this.license.isSharingEnabled()) { + const { credentials } = req.body; + + const credentialId = credentials.id; + const { ownsCredential } = await this.enterpriseCredentialsService.isOwned( + req.user, + credentialId, + ); + + const sharing = await this.enterpriseCredentialsService.getSharing(req.user, credentialId, { + allowGlobalScope: true, + globalScope: 'credential:read', + }); + if (!ownsCredential) { + if (!sharing) { + throw new UnauthorizedError('Forbidden'); + } + + const decryptedData = this.credentialsService.decrypt(sharing.credentials); + Object.assign(credentials, { data: decryptedData }); + } + + const mergedCredentials = deepCopy(credentials); + if (mergedCredentials.data && sharing?.credentials) { + const decryptedData = this.credentialsService.decrypt(sharing.credentials); + mergedCredentials.data = this.credentialsService.unredact( + mergedCredentials.data, + decryptedData, + ); + } + + return await this.credentialsService.test(req.user, mergedCredentials); + } + + // non-enterprise + const { credentials } = req.body; - const sharing = await CredentialsService.getSharing(req.user, credentials.id, { + const sharing = await this.credentialsService.getSharing(req.user, credentials.id, { allowGlobalScope: true, globalScope: 'credential:read', }); const mergedCredentials = deepCopy(credentials); if (mergedCredentials.data && sharing?.credentials) { - const decryptedData = CredentialsService.decrypt(sharing.credentials); - mergedCredentials.data = CredentialsService.unredact(mergedCredentials.data, decryptedData); + const decryptedData = this.credentialsService.decrypt(sharing.credentials); + mergedCredentials.data = this.credentialsService.unredact( + mergedCredentials.data, + decryptedData, + ); } - return await CredentialsService.test(req.user, mergedCredentials); - }), -); + return await 
this.credentialsService.test(req.user, mergedCredentials); + } -/** - * POST /credentials - */ -credentialsController.post( - '/', - ResponseHelper.send(async (req: CredentialRequest.Create) => { - const newCredential = await CredentialsService.prepareCreateData(req.body); + @Post('/') + async createCredentials(req: CredentialRequest.Create) { + const newCredential = await this.credentialsService.prepareCreateData(req.body); - const encryptedData = CredentialsService.createEncryptedData(null, newCredential); - const credential = await CredentialsService.save(newCredential, encryptedData, req.user); + const encryptedData = this.credentialsService.createEncryptedData(null, newCredential); + const credential = await this.credentialsService.save(newCredential, encryptedData, req.user); - void Container.get(InternalHooks).onUserCreatedCredentials({ + void this.internalHooks.onUserCreatedCredentials({ user: req.user, credential_name: newCredential.name, credential_type: credential.type, @@ -129,18 +194,13 @@ credentialsController.post( }); return credential; - }), -); - -/** - * PATCH /credentials/:id - */ -credentialsController.patch( - '/:id(\\w+)', - ResponseHelper.send(async (req: CredentialRequest.Update): Promise => { + } + + @Patch('/:id') + async updateCredentials(req: CredentialRequest.Update) { const { id: credentialId } = req.params; - const sharing = await CredentialsService.getSharing( + const sharing = await this.credentialsService.getSharing( req.user, credentialId, { @@ -151,42 +211,36 @@ credentialsController.patch( ); if (!sharing) { - Container.get(Logger).info( - 'Attempt to update credential blocked due to lack of permissions', - { - credentialId, - userId: req.user.id, - }, - ); + this.logger.info('Attempt to update credential blocked due to lack of permissions', { + credentialId, + userId: req.user.id, + }); throw new NotFoundError( 'Credential to be updated not found. 
You can only update credentials owned by you', ); } if (sharing.role !== 'credential:owner' && !req.user.hasGlobalScope('credential:update')) { - Container.get(Logger).info( - 'Attempt to update credential blocked due to lack of permissions', - { - credentialId, - userId: req.user.id, - }, - ); + this.logger.info('Attempt to update credential blocked due to lack of permissions', { + credentialId, + userId: req.user.id, + }); throw new UnauthorizedError('You can only update credentials owned by you'); } const { credentials: credential } = sharing; - const decryptedData = CredentialsService.decrypt(credential); - const preparedCredentialData = await CredentialsService.prepareUpdateData( + const decryptedData = this.credentialsService.decrypt(credential); + const preparedCredentialData = await this.credentialsService.prepareUpdateData( req.body, decryptedData, ); - const newCredentialData = CredentialsService.createEncryptedData( + const newCredentialData = this.credentialsService.createEncryptedData( credentialId, preparedCredentialData, ); - const responseData = await CredentialsService.update(credentialId, newCredentialData); + const responseData = await this.credentialsService.update(credentialId, newCredentialData); if (responseData === null) { throw new NotFoundError(`Credential ID "${credentialId}" could not be found to be updated.`); @@ -195,21 +249,16 @@ credentialsController.patch( // Remove the encrypted data as it is not needed in the frontend const { data: _, ...rest } = responseData; - Container.get(Logger).verbose('Credential updated', { credentialId }); + this.logger.verbose('Credential updated', { credentialId }); return { ...rest }; - }), -); - -/** - * DELETE /credentials/:id - */ -credentialsController.delete( - '/:id(\\w+)', - ResponseHelper.send(async (req: CredentialRequest.Delete) => { + } + + @Delete('/:id') + async deleteCredentials(req: CredentialRequest.Delete) { const { id: credentialId } = req.params; - const sharing = await CredentialsService.getSharing( + const sharing = await this.credentialsService.getSharing( req.user, credentialId, { @@ -220,33 +269,112 @@ credentialsController.delete( ); if (!sharing) { - Container.get(Logger).info( - 'Attempt to delete credential blocked due to lack of permissions', - { - credentialId, - userId: req.user.id, - }, - ); + this.logger.info('Attempt to delete credential blocked due to lack of permissions', { + credentialId, + userId: req.user.id, + }); throw new NotFoundError( 'Credential to be deleted not found. 
You can only remove credentials owned by you', ); } if (sharing.role !== 'credential:owner' && !req.user.hasGlobalScope('credential:delete')) { - Container.get(Logger).info( - 'Attempt to delete credential blocked due to lack of permissions', - { - credentialId, - userId: req.user.id, - }, - ); + this.logger.info('Attempt to delete credential blocked due to lack of permissions', { + credentialId, + userId: req.user.id, + }); throw new UnauthorizedError('You can only remove credentials owned by you'); } const { credentials: credential } = sharing; - await CredentialsService.delete(credential); + await this.credentialsService.delete(credential); return true; - }), -); + } + + @Licensed('feat:sharing') + @Put('/:id/share') + async shareCredentials(req: CredentialRequest.Share) { + const { id: credentialId } = req.params; + const { shareWithIds } = req.body; + + if ( + !Array.isArray(shareWithIds) || + !shareWithIds.every((userId) => typeof userId === 'string') + ) { + throw new BadRequestError('Bad request'); + } + + const isOwnedRes = await this.enterpriseCredentialsService.isOwned(req.user, credentialId); + const { ownsCredential } = isOwnedRes; + let { credential } = isOwnedRes; + if (!ownsCredential || !credential) { + credential = undefined; + // Allow owners/admins to share + if (req.user.hasGlobalScope('credential:share')) { + const sharedRes = await this.enterpriseCredentialsService.getSharing( + req.user, + credentialId, + { + allowGlobalScope: true, + globalScope: 'credential:share', + }, + ); + credential = sharedRes?.credentials; + } + if (!credential) { + throw new UnauthorizedError('Forbidden'); + } + } + + const ownerIds = ( + await this.enterpriseCredentialsService.getSharings( + Db.getConnection().createEntityManager(), + credentialId, + ['shared'], + ) + ) + .filter((e) => e.role === 'credential:owner') + .map((e) => e.userId); + + let amountRemoved: number | null = null; + let newShareeIds: string[] = []; + await Db.transaction(async (trx) => { + // remove all sharings that are not supposed to exist anymore + const { affected } = await this.credentialsRepository.pruneSharings(trx, credentialId, [ + ...ownerIds, + ...shareWithIds, + ]); + if (affected) amountRemoved = affected; + + const sharings = await this.enterpriseCredentialsService.getSharings(trx, credentialId); + + // extract the new sharings that need to be added + newShareeIds = utils.rightDiff( + [sharings, (sharing) => sharing.userId], + [shareWithIds, (shareeId) => shareeId], + ); + + if (newShareeIds.length) { + await this.enterpriseCredentialsService.share(trx, credential!, newShareeIds); + } + }); + + void this.internalHooks.onUserSharedCredentials({ + user: req.user, + credential_name: credential.name, + credential_type: credential.type, + credential_id: credential.id, + user_id_sharer: req.user.id, + user_ids_sharees_added: newShareeIds, + sharees_removed: amountRemoved, + }); + + await this.userManagementMailer.notifyCredentialsShared({ + sharer: req.user, + newShareeIds, + credentialsName: credential.name, + }); + } +} diff --git a/packages/cli/src/credentials/credentials.service.ee.ts b/packages/cli/src/credentials/credentials.service.ee.ts index 9b31d3c4ebf8b..0958a02db8d68 100644 --- a/packages/cli/src/credentials/credentials.service.ee.ts +++ b/packages/cli/src/credentials/credentials.service.ee.ts @@ -1,17 +1,20 @@ -import { Container } from 'typedi'; -import type { EntityManager, FindOptionsWhere } from 'typeorm'; -import { CredentialsEntity } from '@db/entities/CredentialsEntity'; +import type { 
EntityManager, FindOptionsWhere } from '@n8n/typeorm'; import type { SharedCredentials } from '@db/entities/SharedCredentials'; import type { User } from '@db/entities/User'; -import { CredentialsService, type CredentialsGetSharedOptions } from './credentials.service'; +import { type CredentialsGetSharedOptions } from './credentials.service'; import { SharedCredentialsRepository } from '@db/repositories/sharedCredentials.repository'; import { UserRepository } from '@/databases/repositories/user.repository'; +import { CredentialsEntity } from '@/databases/entities/CredentialsEntity'; +import { Service } from 'typedi'; -export class EECredentialsService extends CredentialsService { - static async isOwned( - user: User, - credentialId: string, - ): Promise<{ ownsCredential: boolean; credential?: CredentialsEntity }> { +@Service() +export class EnterpriseCredentialsService { + constructor( + private readonly userRepository: UserRepository, + private readonly sharedCredentialsRepository: SharedCredentialsRepository, + ) {} + + async isOwned(user: User, credentialId: string) { const sharing = await this.getSharing(user, credentialId, { allowGlobalScope: false }, [ 'credentials', ]); @@ -26,12 +29,12 @@ export class EECredentialsService extends CredentialsService { /** * Retrieve the sharing that matches a user and a credential. */ - static async getSharing( + async getSharing( user: User, credentialId: string, options: CredentialsGetSharedOptions, relations: string[] = ['credentials'], - ): Promise { + ) { const where: FindOptionsWhere = { credentialsId: credentialId }; // Omit user from where if the requesting user has relevant @@ -41,35 +44,28 @@ export class EECredentialsService extends CredentialsService { where.userId = user.id; } - return await Container.get(SharedCredentialsRepository).findOne({ + return await this.sharedCredentialsRepository.findOne({ where, relations, }); } - static async getSharings( - transaction: EntityManager, - credentialId: string, - relations = ['shared'], - ): Promise { + async getSharings(transaction: EntityManager, credentialId: string, relations = ['shared']) { const credential = await transaction.findOne(CredentialsEntity, { where: { id: credentialId }, relations, }); + return credential?.shared ?? 
[]; } - static async share( - transaction: EntityManager, - credential: CredentialsEntity, - shareWithIds: string[], - ): Promise { - const users = await Container.get(UserRepository).getByIds(transaction, shareWithIds); + async share(transaction: EntityManager, credential: CredentialsEntity, shareWithIds: string[]) { + const users = await this.userRepository.getByIds(transaction, shareWithIds); const newSharedCredentials = users .filter((user) => !user.isPending) .map((user) => - Container.get(SharedCredentialsRepository).create({ + this.sharedCredentialsRepository.create({ credentialsId: credential.id, userId: user.id, role: 'credential:user', diff --git a/packages/cli/src/credentials/credentials.service.ts b/packages/cli/src/credentials/credentials.service.ts index 9533b5a1ad6f7..9bfb505ea3ab8 100644 --- a/packages/cli/src/credentials/credentials.service.ts +++ b/packages/cli/src/credentials/credentials.service.ts @@ -3,18 +3,14 @@ import type { ICredentialDataDecryptedObject, ICredentialsDecrypted, ICredentialType, - INodeCredentialTestResult, INodeProperties, } from 'n8n-workflow'; import { CREDENTIAL_EMPTY_VALUE, deepCopy, NodeHelpers } from 'n8n-workflow'; -import { Container } from 'typedi'; -import type { FindOptionsWhere } from 'typeorm'; - +import type { FindOptionsWhere } from '@n8n/typeorm'; import type { Scope } from '@n8n/permissions'; - import * as Db from '@/Db'; import type { ICredentialsDb } from '@/Interfaces'; -import { CredentialsHelper, createCredentialsFromCredentialsEntity } from '@/CredentialsHelper'; +import { createCredentialsFromCredentialsEntity } from '@/CredentialsHelper'; import { CREDENTIAL_BLANKING_VALUE } from '@/constants'; import { CredentialsEntity } from '@db/entities/CredentialsEntity'; import { SharedCredentials } from '@db/entities/SharedCredentials'; @@ -27,20 +23,33 @@ import { OwnershipService } from '@/services/ownership.service'; import { Logger } from '@/Logger'; import { CredentialsRepository } from '@db/repositories/credentials.repository'; import { SharedCredentialsRepository } from '@db/repositories/sharedCredentials.repository'; +import { Service } from 'typedi'; +import { CredentialsTester } from '@/services/credentials-tester.service'; export type CredentialsGetSharedOptions = | { allowGlobalScope: true; globalScope: Scope } | { allowGlobalScope: false }; +@Service() export class CredentialsService { - static async get(where: FindOptionsWhere, options?: { relations: string[] }) { - return await Container.get(CredentialsRepository).findOne({ + constructor( + private readonly credentialsRepository: CredentialsRepository, + private readonly sharedCredentialsRepository: SharedCredentialsRepository, + private readonly ownershipService: OwnershipService, + private readonly logger: Logger, + private readonly credentialsTester: CredentialsTester, + private readonly externalHooks: ExternalHooks, + private readonly credentialTypes: CredentialTypes, + ) {} + + async get(where: FindOptionsWhere, options?: { relations: string[] }) { + return await this.credentialsRepository.findOne({ relations: options?.relations, where, }); } - static async getMany( + async getMany( user: User, options: { listQueryOptions?: ListQuery.Options; onlyOwn?: boolean } = {}, ) { @@ -48,31 +57,29 @@ export class CredentialsService { const isDefaultSelect = !options.listQueryOptions?.select; if (returnAll) { - const credentials = await Container.get(CredentialsRepository).findMany( - options.listQueryOptions, - ); + const credentials = await 
this.credentialsRepository.findMany(options.listQueryOptions); return isDefaultSelect - ? credentials.map((c) => Container.get(OwnershipService).addOwnedByAndSharedWith(c)) + ? credentials.map((c) => this.ownershipService.addOwnedByAndSharedWith(c)) : credentials; } - const ids = await Container.get(SharedCredentialsRepository).getAccessibleCredentials(user.id); + const ids = await this.sharedCredentialsRepository.getAccessibleCredentialIds([user.id]); - const credentials = await Container.get(CredentialsRepository).findMany( + const credentials = await this.credentialsRepository.findMany( options.listQueryOptions, ids, // only accessible credentials ); return isDefaultSelect - ? credentials.map((c) => Container.get(OwnershipService).addOwnedByAndSharedWith(c)) + ? credentials.map((c) => this.ownershipService.addOwnedByAndSharedWith(c)) : credentials; } /** * Retrieve the sharing that matches a user and a credential. */ - static async getSharing( + async getSharing( user: User, credentialId: string, options: CredentialsGetSharedOptions, @@ -88,17 +95,17 @@ export class CredentialsService { where.role = 'credential:owner'; } - return await Container.get(SharedCredentialsRepository).findOne({ where, relations }); + return await this.sharedCredentialsRepository.findOne({ where, relations }); } - static async prepareCreateData( + async prepareCreateData( data: CredentialRequest.CredentialProperties, ): Promise { const { id, ...rest } = data; // This saves us a merge but requires some type casting. These // types are compatible for this case. - const newCredentials = Container.get(CredentialsRepository).create(rest as ICredentialsDb); + const newCredentials = this.credentialsRepository.create(rest as ICredentialsDb); await validateEntity(newCredentials); @@ -110,7 +117,7 @@ export class CredentialsService { return newCredentials; } - static async prepareUpdateData( + async prepareUpdateData( data: CredentialRequest.CredentialProperties, decryptedData: ICredentialDataDecryptedObject, ): Promise { @@ -121,7 +128,7 @@ export class CredentialsService { // This saves us a merge but requires some type casting. These // types are compatible for this case. 
- const updateData = Container.get(CredentialsRepository).create(mergedData as ICredentialsDb); + const updateData = this.credentialsRepository.create(mergedData as ICredentialsDb); await validateEntity(updateData); @@ -141,7 +148,7 @@ export class CredentialsService { return updateData; } - static createEncryptedData(credentialId: string | null, data: CredentialsEntity): ICredentialsDb { + createEncryptedData(credentialId: string | null, data: CredentialsEntity): ICredentialsDb { const credentials = new Credentials( { id: credentialId, name: data.name }, data.type, @@ -158,35 +165,28 @@ export class CredentialsService { return newCredentialData; } - static decrypt(credential: CredentialsEntity): ICredentialDataDecryptedObject { + decrypt(credential: CredentialsEntity) { const coreCredential = createCredentialsFromCredentialsEntity(credential); return coreCredential.getData(); } - static async update( - credentialId: string, - newCredentialData: ICredentialsDb, - ): Promise { - await Container.get(ExternalHooks).run('credentials.update', [newCredentialData]); + async update(credentialId: string, newCredentialData: ICredentialsDb) { + await this.externalHooks.run('credentials.update', [newCredentialData]); // Update the credentials in DB - await Container.get(CredentialsRepository).update(credentialId, newCredentialData); + await this.credentialsRepository.update(credentialId, newCredentialData); // We sadly get nothing back from "update". Neither if it updated a record // nor the new value. So query now the updated entry. - return await Container.get(CredentialsRepository).findOneBy({ id: credentialId }); + return await this.credentialsRepository.findOneBy({ id: credentialId }); } - static async save( - credential: CredentialsEntity, - encryptedData: ICredentialsDb, - user: User, - ): Promise { + async save(credential: CredentialsEntity, encryptedData: ICredentialsDb, user: User) { // To avoid side effects const newCredential = new CredentialsEntity(); Object.assign(newCredential, credential, encryptedData); - await Container.get(ExternalHooks).run('credentials.create', [encryptedData]); + await this.externalHooks.run('credentials.create', [encryptedData]); const result = await Db.transaction(async (transactionManager) => { const savedCredential = await transactionManager.save(newCredential); @@ -205,39 +205,31 @@ export class CredentialsService { return savedCredential; }); - Container.get(Logger).verbose('New credential created', { + this.logger.verbose('New credential created', { credentialId: newCredential.id, ownerId: user.id, }); return result; } - static async delete(credentials: CredentialsEntity): Promise { - await Container.get(ExternalHooks).run('credentials.delete', [credentials.id]); + async delete(credentials: CredentialsEntity) { + await this.externalHooks.run('credentials.delete', [credentials.id]); - await Container.get(CredentialsRepository).remove(credentials); + await this.credentialsRepository.remove(credentials); } - static async test( - user: User, - credentials: ICredentialsDecrypted, - ): Promise { - const helper = Container.get(CredentialsHelper); - return await helper.testCredentials(user, credentials.type, credentials); + async test(user: User, credentials: ICredentialsDecrypted) { + return await this.credentialsTester.testCredentials(user, credentials.type, credentials); } // Take data and replace all sensitive values with a sentinel value. // This will replace password fields and oauth data. 
- static redact( - data: ICredentialDataDecryptedObject, - credential: CredentialsEntity, - ): ICredentialDataDecryptedObject { + redact(data: ICredentialDataDecryptedObject, credential: CredentialsEntity) { const copiedData = deepCopy(data); - const credTypes = Container.get(CredentialTypes); let credType: ICredentialType; try { - credType = credTypes.getByName(credential.type); + credType = this.credentialTypes.getByName(credential.type); } catch { // This _should_ only happen when testing. If it does happen in // production it means it's either a mangled credential or a @@ -249,7 +241,7 @@ export class CredentialsService { const getExtendedProps = (type: ICredentialType) => { const props: INodeProperties[] = []; for (const e of type.extends ?? []) { - const extendsType = credTypes.getByName(e); + const extendsType = this.credentialTypes.getByName(e); const extendedProps = getExtendedProps(extendsType); NodeHelpers.mergeNodeProperties(props, extendedProps); } @@ -287,7 +279,7 @@ export class CredentialsService { return copiedData; } - private static unredactRestoreValues(unmerged: any, replacement: any) { + private unredactRestoreValues(unmerged: any, replacement: any) { // eslint-disable-next-line @typescript-eslint/no-unsafe-argument for (const [key, value] of Object.entries(unmerged)) { if (value === CREDENTIAL_BLANKING_VALUE || value === CREDENTIAL_EMPTY_VALUE) { @@ -310,10 +302,10 @@ export class CredentialsService { // Take unredacted data (probably from the DB) and merge it with // redacted data to create an unredacted version. - static unredact( + unredact( redactedData: ICredentialDataDecryptedObject, savedData: ICredentialDataDecryptedObject, - ): ICredentialDataDecryptedObject { + ) { // Replace any blank sentinel values with their saved version const mergedData = deepCopy(redactedData); this.unredactRestoreValues(mergedData, savedData); diff --git a/packages/cli/src/databases/config.ts b/packages/cli/src/databases/config.ts index aab7b56d33ed1..5e4296a89b528 100644 --- a/packages/cli/src/databases/config.ts +++ b/packages/cli/src/databases/config.ts @@ -1,8 +1,8 @@ import path from 'path'; import { Container } from 'typedi'; -import type { SqliteConnectionOptions } from 'typeorm/driver/sqlite/SqliteConnectionOptions'; -import type { PostgresConnectionOptions } from 'typeorm/driver/postgres/PostgresConnectionOptions'; -import type { MysqlConnectionOptions } from 'typeorm/driver/mysql/MysqlConnectionOptions'; +import type { SqliteConnectionOptions } from '@n8n/typeorm/driver/sqlite/SqliteConnectionOptions'; +import type { PostgresConnectionOptions } from '@n8n/typeorm/driver/postgres/PostgresConnectionOptions'; +import type { MysqlConnectionOptions } from '@n8n/typeorm/driver/mysql/MysqlConnectionOptions'; import { InstanceSettings } from 'n8n-core'; import { entities } from './entities'; diff --git a/packages/cli/src/databases/dsl/Column.ts b/packages/cli/src/databases/dsl/Column.ts index 48d2191478977..aa5ff04e0d839 100644 --- a/packages/cli/src/databases/dsl/Column.ts +++ b/packages/cli/src/databases/dsl/Column.ts @@ -1,4 +1,4 @@ -import type { Driver, TableColumnOptions } from 'typeorm'; +import type { Driver, TableColumnOptions } from '@n8n/typeorm'; export class Column { private type: 'int' | 'boolean' | 'varchar' | 'text' | 'json' | 'timestamp' | 'uuid'; diff --git a/packages/cli/src/databases/dsl/Indices.ts b/packages/cli/src/databases/dsl/Indices.ts index 5e5a3346b8266..a5ec95e1012ed 100644 --- a/packages/cli/src/databases/dsl/Indices.ts +++ 
b/packages/cli/src/databases/dsl/Indices.ts @@ -1,5 +1,5 @@ -import type { QueryRunner } from 'typeorm'; -import { TableIndex } from 'typeorm'; +import type { QueryRunner } from '@n8n/typeorm'; +import { TableIndex } from '@n8n/typeorm'; import LazyPromise from 'p-lazy'; abstract class IndexOperation extends LazyPromise { diff --git a/packages/cli/src/databases/dsl/Table.ts b/packages/cli/src/databases/dsl/Table.ts index 94ed4392d0298..08cea8d29d40e 100644 --- a/packages/cli/src/databases/dsl/Table.ts +++ b/packages/cli/src/databases/dsl/Table.ts @@ -1,5 +1,5 @@ -import type { TableForeignKeyOptions, TableIndexOptions, QueryRunner } from 'typeorm'; -import { Table, TableColumn, TableForeignKey } from 'typeorm'; +import type { TableForeignKeyOptions, TableIndexOptions, QueryRunner } from '@n8n/typeorm'; +import { Table, TableColumn, TableForeignKey } from '@n8n/typeorm'; import LazyPromise from 'p-lazy'; import { Column } from './Column'; import { ApplicationError } from 'n8n-workflow'; diff --git a/packages/cli/src/databases/dsl/index.ts b/packages/cli/src/databases/dsl/index.ts index 2e108c0ef762a..bb5ef859ecf8f 100644 --- a/packages/cli/src/databases/dsl/index.ts +++ b/packages/cli/src/databases/dsl/index.ts @@ -1,4 +1,4 @@ -import type { QueryRunner } from 'typeorm'; +import type { QueryRunner } from '@n8n/typeorm'; import { Column } from './Column'; import { AddColumns, diff --git a/packages/cli/src/databases/entities/AbstractEntity.ts b/packages/cli/src/databases/entities/AbstractEntity.ts index 3c64a43aefa2e..48ed0079c5b34 100644 --- a/packages/cli/src/databases/entities/AbstractEntity.ts +++ b/packages/cli/src/databases/entities/AbstractEntity.ts @@ -1,11 +1,11 @@ -import type { ColumnOptions } from 'typeorm'; +import type { ColumnOptions } from '@n8n/typeorm'; import { BeforeInsert, BeforeUpdate, CreateDateColumn, PrimaryColumn, UpdateDateColumn, -} from 'typeorm'; +} from '@n8n/typeorm'; import config from '@/config'; import type { Class } from 'n8n-core'; import { generateNanoId } from '../utils/generators'; diff --git a/packages/cli/src/databases/entities/AuthIdentity.ts b/packages/cli/src/databases/entities/AuthIdentity.ts index 47289c9306b9b..ba3dd9d1c4052 100644 --- a/packages/cli/src/databases/entities/AuthIdentity.ts +++ b/packages/cli/src/databases/entities/AuthIdentity.ts @@ -1,4 +1,4 @@ -import { Column, Entity, ManyToOne, PrimaryColumn, Unique } from 'typeorm'; +import { Column, Entity, ManyToOne, PrimaryColumn, Unique } from '@n8n/typeorm'; import { WithTimestamps } from './AbstractEntity'; import { User } from './User'; diff --git a/packages/cli/src/databases/entities/AuthProviderSyncHistory.ts b/packages/cli/src/databases/entities/AuthProviderSyncHistory.ts index 51ef0077770f2..bb899b9da677e 100644 --- a/packages/cli/src/databases/entities/AuthProviderSyncHistory.ts +++ b/packages/cli/src/databases/entities/AuthProviderSyncHistory.ts @@ -1,4 +1,4 @@ -import { Column, Entity, PrimaryGeneratedColumn } from 'typeorm'; +import { Column, Entity, PrimaryGeneratedColumn } from '@n8n/typeorm'; import { datetimeColumnType } from './AbstractEntity'; import { AuthProviderType } from './AuthIdentity'; diff --git a/packages/cli/src/databases/entities/CredentialsEntity.ts b/packages/cli/src/databases/entities/CredentialsEntity.ts index 23a2e70760e24..cc365c2e7009e 100644 --- a/packages/cli/src/databases/entities/CredentialsEntity.ts +++ b/packages/cli/src/databases/entities/CredentialsEntity.ts @@ -1,5 +1,5 @@ import type { ICredentialNodeAccess } from 'n8n-workflow'; -import { 
Column, Entity, Index, OneToMany } from 'typeorm'; +import { Column, Entity, Index, OneToMany } from '@n8n/typeorm'; import { IsArray, IsObject, IsString, Length } from 'class-validator'; import type { SharedCredentials } from './SharedCredentials'; import { WithTimestampsAndStringId, jsonColumnType } from './AbstractEntity'; diff --git a/packages/cli/src/databases/entities/EventDestinations.ts b/packages/cli/src/databases/entities/EventDestinations.ts index b302e9a1a8015..27e594efa1707 100644 --- a/packages/cli/src/databases/entities/EventDestinations.ts +++ b/packages/cli/src/databases/entities/EventDestinations.ts @@ -1,5 +1,5 @@ import { MessageEventBusDestinationOptions } from 'n8n-workflow'; -import { Column, Entity, PrimaryColumn } from 'typeorm'; +import { Column, Entity, PrimaryColumn } from '@n8n/typeorm'; import { WithTimestamps, jsonColumnType } from './AbstractEntity'; @Entity({ name: 'event_destinations' }) diff --git a/packages/cli/src/databases/entities/ExecutionData.ts b/packages/cli/src/databases/entities/ExecutionData.ts index 06143c74fb839..5a1bd0cf587ce 100644 --- a/packages/cli/src/databases/entities/ExecutionData.ts +++ b/packages/cli/src/databases/entities/ExecutionData.ts @@ -1,4 +1,4 @@ -import { Column, Entity, ManyToOne, PrimaryColumn } from 'typeorm'; +import { Column, Entity, ManyToOne, PrimaryColumn } from '@n8n/typeorm'; import { idStringifier } from '../utils/transformers'; import { ExecutionEntity } from './ExecutionEntity'; import { jsonColumnType } from './AbstractEntity'; diff --git a/packages/cli/src/databases/entities/ExecutionEntity.ts b/packages/cli/src/databases/entities/ExecutionEntity.ts index d73267ee52003..14fad4d50e905 100644 --- a/packages/cli/src/databases/entities/ExecutionEntity.ts +++ b/packages/cli/src/databases/entities/ExecutionEntity.ts @@ -10,7 +10,7 @@ import { PrimaryColumn, Relation, DeleteDateColumn, -} from 'typeorm'; +} from '@n8n/typeorm'; import { datetimeColumnType } from './AbstractEntity'; import { idStringifier } from '../utils/transformers'; import type { ExecutionData } from './ExecutionData'; diff --git a/packages/cli/src/databases/entities/ExecutionMetadata.ts b/packages/cli/src/databases/entities/ExecutionMetadata.ts index 99ea8e01cec24..6e316ecfd7f04 100644 --- a/packages/cli/src/databases/entities/ExecutionMetadata.ts +++ b/packages/cli/src/databases/entities/ExecutionMetadata.ts @@ -1,4 +1,4 @@ -import { Column, Entity, ManyToOne, PrimaryGeneratedColumn, RelationId } from 'typeorm'; +import { Column, Entity, ManyToOne, PrimaryGeneratedColumn, RelationId } from '@n8n/typeorm'; import { ExecutionEntity } from './ExecutionEntity'; @Entity() diff --git a/packages/cli/src/databases/entities/InstalledNodes.ts b/packages/cli/src/databases/entities/InstalledNodes.ts index 7f2e94b6bca59..f3eee3ab9a7e6 100644 --- a/packages/cli/src/databases/entities/InstalledNodes.ts +++ b/packages/cli/src/databases/entities/InstalledNodes.ts @@ -1,4 +1,4 @@ -import { Column, Entity, JoinColumn, ManyToOne, PrimaryColumn } from 'typeorm'; +import { Column, Entity, JoinColumn, ManyToOne, PrimaryColumn } from '@n8n/typeorm'; import { InstalledPackages } from './InstalledPackages'; @Entity() diff --git a/packages/cli/src/databases/entities/InstalledPackages.ts b/packages/cli/src/databases/entities/InstalledPackages.ts index c0c06a00da63d..38cd52cd31c2f 100644 --- a/packages/cli/src/databases/entities/InstalledPackages.ts +++ b/packages/cli/src/databases/entities/InstalledPackages.ts @@ -1,4 +1,4 @@ -import { Column, Entity, JoinColumn, 
OneToMany, PrimaryColumn } from 'typeorm'; +import { Column, Entity, JoinColumn, OneToMany, PrimaryColumn } from '@n8n/typeorm'; import type { InstalledNodes } from './InstalledNodes'; import { WithTimestamps } from './AbstractEntity'; diff --git a/packages/cli/src/databases/entities/Settings.ts b/packages/cli/src/databases/entities/Settings.ts index 7f1f99cd1e1e3..8fcb38325a43d 100644 --- a/packages/cli/src/databases/entities/Settings.ts +++ b/packages/cli/src/databases/entities/Settings.ts @@ -1,5 +1,5 @@ import type { IDataObject } from 'n8n-workflow'; -import { Column, Entity, PrimaryColumn } from 'typeorm'; +import { Column, Entity, PrimaryColumn } from '@n8n/typeorm'; interface ISettingsDb { key: string; diff --git a/packages/cli/src/databases/entities/SharedCredentials.ts b/packages/cli/src/databases/entities/SharedCredentials.ts index 1685732004229..e43f3031d88f7 100644 --- a/packages/cli/src/databases/entities/SharedCredentials.ts +++ b/packages/cli/src/databases/entities/SharedCredentials.ts @@ -1,4 +1,4 @@ -import { Column, Entity, ManyToOne, PrimaryColumn } from 'typeorm'; +import { Column, Entity, ManyToOne, PrimaryColumn } from '@n8n/typeorm'; import { CredentialsEntity } from './CredentialsEntity'; import { User } from './User'; import { WithTimestamps } from './AbstractEntity'; diff --git a/packages/cli/src/databases/entities/SharedWorkflow.ts b/packages/cli/src/databases/entities/SharedWorkflow.ts index adb94beb5ab40..d5681f6467eae 100644 --- a/packages/cli/src/databases/entities/SharedWorkflow.ts +++ b/packages/cli/src/databases/entities/SharedWorkflow.ts @@ -1,4 +1,4 @@ -import { Column, Entity, ManyToOne, PrimaryColumn } from 'typeorm'; +import { Column, Entity, ManyToOne, PrimaryColumn } from '@n8n/typeorm'; import { WorkflowEntity } from './WorkflowEntity'; import { User } from './User'; import { WithTimestamps } from './AbstractEntity'; diff --git a/packages/cli/src/databases/entities/TagEntity.ts b/packages/cli/src/databases/entities/TagEntity.ts index c425b505ba5c5..5e7954df6665e 100644 --- a/packages/cli/src/databases/entities/TagEntity.ts +++ b/packages/cli/src/databases/entities/TagEntity.ts @@ -1,4 +1,4 @@ -import { Column, Entity, Index, ManyToMany, OneToMany } from 'typeorm'; +import { Column, Entity, Index, ManyToMany, OneToMany } from '@n8n/typeorm'; import { IsString, Length } from 'class-validator'; import type { WorkflowEntity } from './WorkflowEntity'; import type { WorkflowTagMapping } from './WorkflowTagMapping'; diff --git a/packages/cli/src/databases/entities/User.ts b/packages/cli/src/databases/entities/User.ts index 9a1a96d5f7248..a9da54fe0dea5 100644 --- a/packages/cli/src/databases/entities/User.ts +++ b/packages/cli/src/databases/entities/User.ts @@ -8,7 +8,7 @@ import { OneToMany, PrimaryGeneratedColumn, BeforeInsert, -} from 'typeorm'; +} from '@n8n/typeorm'; import { IsEmail, IsString, Length } from 'class-validator'; import type { IUser, IUserSettings } from 'n8n-workflow'; import type { SharedWorkflow } from './SharedWorkflow'; diff --git a/packages/cli/src/databases/entities/Variables.ts b/packages/cli/src/databases/entities/Variables.ts index 42f52c4ace51f..da8272853f73b 100644 --- a/packages/cli/src/databases/entities/Variables.ts +++ b/packages/cli/src/databases/entities/Variables.ts @@ -1,4 +1,4 @@ -import { Column, Entity } from 'typeorm'; +import { Column, Entity } from '@n8n/typeorm'; import { WithStringId } from './AbstractEntity'; @Entity() diff --git a/packages/cli/src/databases/entities/WebhookEntity.ts 
b/packages/cli/src/databases/entities/WebhookEntity.ts index 89b863d427cbd..b8dafabe2b897 100644 --- a/packages/cli/src/databases/entities/WebhookEntity.ts +++ b/packages/cli/src/databases/entities/WebhookEntity.ts @@ -1,5 +1,5 @@ import { IHttpRequestMethods } from 'n8n-workflow'; -import { Column, Entity, Index, PrimaryColumn } from 'typeorm'; +import { Column, Entity, Index, PrimaryColumn } from '@n8n/typeorm'; @Entity() @Index(['webhookId', 'method', 'pathLength']) diff --git a/packages/cli/src/databases/entities/WorkflowEntity.ts b/packages/cli/src/databases/entities/WorkflowEntity.ts index 332b71ecd55d5..0b1fb576f4325 100644 --- a/packages/cli/src/databases/entities/WorkflowEntity.ts +++ b/packages/cli/src/databases/entities/WorkflowEntity.ts @@ -3,7 +3,7 @@ import { Length } from 'class-validator'; import { IConnections, IDataObject, IWorkflowSettings, WorkflowFEMeta } from 'n8n-workflow'; import type { IBinaryKeyData, INode, IPairedItemData } from 'n8n-workflow'; -import { Column, Entity, Index, JoinColumn, JoinTable, ManyToMany, OneToMany } from 'typeorm'; +import { Column, Entity, Index, JoinColumn, JoinTable, ManyToMany, OneToMany } from '@n8n/typeorm'; import config from '@/config'; import type { TagEntity } from './TagEntity'; diff --git a/packages/cli/src/databases/entities/WorkflowEntityWithVersion.ts b/packages/cli/src/databases/entities/WorkflowEntityWithVersion.ts index 995909da833bf..e45fb9ab2559c 100644 --- a/packages/cli/src/databases/entities/WorkflowEntityWithVersion.ts +++ b/packages/cli/src/databases/entities/WorkflowEntityWithVersion.ts @@ -1,7 +1,7 @@ import { IConnections } from 'n8n-workflow'; import type { IDataObject, INode, IWorkflowSettings } from 'n8n-workflow'; -import { Column, Entity, PrimaryColumn } from 'typeorm'; +import { Column, Entity, PrimaryColumn } from '@n8n/typeorm'; import { IsDate } from 'class-validator'; import { WithTimestampsAndStringId, jsonColumnType } from './AbstractEntity'; diff --git a/packages/cli/src/databases/entities/WorkflowHistory.ts b/packages/cli/src/databases/entities/WorkflowHistory.ts index a1db4ed177cae..b66bbff21014c 100644 --- a/packages/cli/src/databases/entities/WorkflowHistory.ts +++ b/packages/cli/src/databases/entities/WorkflowHistory.ts @@ -1,4 +1,4 @@ -import { Column, Entity, ManyToOne, PrimaryColumn } from 'typeorm'; +import { Column, Entity, ManyToOne, PrimaryColumn } from '@n8n/typeorm'; import { WithTimestamps, jsonColumnType } from './AbstractEntity'; import { IConnections } from 'n8n-workflow'; import type { INode } from 'n8n-workflow'; diff --git a/packages/cli/src/databases/entities/WorkflowStatistics.ts b/packages/cli/src/databases/entities/WorkflowStatistics.ts index 177000a0b02af..5bdeffcb7cae4 100644 --- a/packages/cli/src/databases/entities/WorkflowStatistics.ts +++ b/packages/cli/src/databases/entities/WorkflowStatistics.ts @@ -1,4 +1,4 @@ -import { Column, Entity, ManyToOne, PrimaryColumn } from 'typeorm'; +import { Column, Entity, ManyToOne, PrimaryColumn } from '@n8n/typeorm'; import { datetimeColumnType } from './AbstractEntity'; import { WorkflowEntity } from './WorkflowEntity'; diff --git a/packages/cli/src/databases/entities/WorkflowTagMapping.ts b/packages/cli/src/databases/entities/WorkflowTagMapping.ts index 69f74bc8a60f3..2f0727272c638 100644 --- a/packages/cli/src/databases/entities/WorkflowTagMapping.ts +++ b/packages/cli/src/databases/entities/WorkflowTagMapping.ts @@ -1,4 +1,4 @@ -import { Entity, JoinColumn, ManyToOne, PrimaryColumn } from 'typeorm'; +import { Entity, JoinColumn, 
ManyToOne, PrimaryColumn } from '@n8n/typeorm'; import type { TagEntity } from './TagEntity'; import type { WorkflowEntity } from './WorkflowEntity'; diff --git a/packages/cli/src/databases/migrations/common/1705429061930-DropRoleMapping.ts b/packages/cli/src/databases/migrations/common/1705429061930-DropRoleMapping.ts index dd2eafbc7875d..2fa897707aee9 100644 --- a/packages/cli/src/databases/migrations/common/1705429061930-DropRoleMapping.ts +++ b/packages/cli/src/databases/migrations/common/1705429061930-DropRoleMapping.ts @@ -61,20 +61,23 @@ export class DropRoleMapping1705429061930 implements ReversibleMigration { const isMySQL = ['mariadb', 'mysqldb'].includes(dbType); const roleField = isMySQL ? `CONCAT('${scope}:', R.name)` : `'${scope}:' || R.name`; const subQuery = ` - SELECT ${roleField} as role, T.${idColumn} as id${table !== 'user' ? `, T.${uidColumn} as uid` : ''} + SELECT ${roleField} as role, T.${idColumn} as id${ + table !== 'user' ? `, T.${uidColumn} as uid` : '' + } FROM ${tableName} T LEFT JOIN ${roleTable} R ON T.${roleColumn} = R.id and R.scope = '${scope}'`; + const where = `WHERE ${tableName}.${idColumn} = mapping.id${ + table !== 'user' ? ` AND ${tableName}.${uidColumn} = mapping.uid` : '' + }`; const swQuery = isMySQL ? `UPDATE ${tableName}, (${subQuery}) as mapping SET ${tableName}.role = mapping.role - WHERE ${tableName}.${idColumn} = mapping.id` + ${where}` : `UPDATE ${tableName} SET role = mapping.role FROM (${subQuery}) as mapping - WHERE ${tableName}.${idColumn} = mapping.id${table !== 'user' ? ` AND ${tableName}.${uidColumn} = mapping.uid` : ''}`; - - + ${where}`; await runQuery(swQuery); await addNotNull(table, 'role'); @@ -104,7 +107,7 @@ export class DropRoleMapping1705429061930 implements ReversibleMigration { const roleTable = escape.tableName('role'); const tableName = escape.tableName(table); const idColumn = escape.columnName(idColumns[table]); - const uidColumn = escape.columnName(idColumns[table]); + const uidColumn = escape.columnName(uidColumns[table]); const roleColumn = escape.columnName(roleColumnName); const scope = roleScopes[table]; const isMySQL = ['mariadb', 'mysqldb'].includes(dbType); @@ -114,14 +117,17 @@ export class DropRoleMapping1705429061930 implements ReversibleMigration { FROM ${tableName} T LEFT JOIN ${roleTable} R ON T.role = ${roleField} and R.scope = '${scope}'`; + const where = `WHERE ${tableName}.${idColumn} = mapping.id${ + table !== 'user' ? ` AND ${tableName}.${uidColumn} = mapping.uid` : '' + }`; const query = isMySQL ? `UPDATE ${tableName}, (${subQuery}) as mapping SET ${tableName}.${roleColumn} = mapping.role_id - WHERE ${tableName}.${idColumn} = mapping.id` + ${where}` : `UPDATE ${tableName} SET ${roleColumn} = mapping.role_id FROM (${subQuery}) as mapping - WHERE ${tableName}.${idColumn} = mapping.id${table !== 'user' ? 
` AND ${tableName}.${uidColumn} = mapping.uid` : ''}`; + ${where}`; await runQuery(query); await addNotNull(table, roleColumnName); diff --git a/packages/cli/src/databases/repositories/authIdentity.repository.ts b/packages/cli/src/databases/repositories/authIdentity.repository.ts index 6ec5fe2310b85..d285d6858204e 100644 --- a/packages/cli/src/databases/repositories/authIdentity.repository.ts +++ b/packages/cli/src/databases/repositories/authIdentity.repository.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { DataSource, Repository } from 'typeorm'; +import { DataSource, Repository } from '@n8n/typeorm'; import { AuthIdentity } from '../entities/AuthIdentity'; @Service() diff --git a/packages/cli/src/databases/repositories/authProviderSyncHistory.repository.ts b/packages/cli/src/databases/repositories/authProviderSyncHistory.repository.ts index 092f4273ee00e..86bacfae1b43c 100644 --- a/packages/cli/src/databases/repositories/authProviderSyncHistory.repository.ts +++ b/packages/cli/src/databases/repositories/authProviderSyncHistory.repository.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { DataSource, Repository } from 'typeorm'; +import { DataSource, Repository } from '@n8n/typeorm'; import { AuthProviderSyncHistory } from '../entities/AuthProviderSyncHistory'; @Service() diff --git a/packages/cli/src/databases/repositories/credentials.repository.ts b/packages/cli/src/databases/repositories/credentials.repository.ts index 4c6e75cfcdcd1..0b11b4015122f 100644 --- a/packages/cli/src/databases/repositories/credentials.repository.ts +++ b/packages/cli/src/databases/repositories/credentials.repository.ts @@ -1,6 +1,6 @@ import { Service } from 'typedi'; -import { DataSource, In, Not, Repository, Like } from 'typeorm'; -import type { FindManyOptions, DeleteResult, EntityManager, FindOptionsWhere } from 'typeorm'; +import { DataSource, In, Not, Repository, Like } from '@n8n/typeorm'; +import type { FindManyOptions, DeleteResult, EntityManager, FindOptionsWhere } from '@n8n/typeorm'; import { CredentialsEntity } from '../entities/CredentialsEntity'; import { SharedCredentials } from '../entities/SharedCredentials'; import type { ListQuery } from '@/requests'; diff --git a/packages/cli/src/databases/repositories/eventDestinations.repository.ts b/packages/cli/src/databases/repositories/eventDestinations.repository.ts index 627a9638f18e5..98882bbb509c9 100644 --- a/packages/cli/src/databases/repositories/eventDestinations.repository.ts +++ b/packages/cli/src/databases/repositories/eventDestinations.repository.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { DataSource, Repository } from 'typeorm'; +import { DataSource, Repository } from '@n8n/typeorm'; import { EventDestinations } from '../entities/EventDestinations'; @Service() diff --git a/packages/cli/src/databases/repositories/execution.repository.ts b/packages/cli/src/databases/repositories/execution.repository.ts index 4a0f712be216b..dd04ec673fa5c 100644 --- a/packages/cli/src/databases/repositories/execution.repository.ts +++ b/packages/cli/src/databases/repositories/execution.repository.ts @@ -10,15 +10,15 @@ import { Not, Raw, Repository, -} from 'typeorm'; -import { DateUtils } from 'typeorm/util/DateUtils'; +} from '@n8n/typeorm'; +import { DateUtils } from '@n8n/typeorm/util/DateUtils'; import type { FindManyOptions, FindOneOptions, FindOperator, FindOptionsWhere, SelectQueryBuilder, -} from 'typeorm'; +} from '@n8n/typeorm'; import { parse, stringify } from 'flatted'; import { ApplicationError, 
diff --git a/packages/cli/src/databases/repositories/executionData.repository.ts b/packages/cli/src/databases/repositories/executionData.repository.ts index 3eebab012263c..5872f9888cd66 100644 --- a/packages/cli/src/databases/repositories/executionData.repository.ts +++ b/packages/cli/src/databases/repositories/executionData.repository.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { DataSource, In, Repository } from 'typeorm'; +import { DataSource, In, Repository } from '@n8n/typeorm'; import { ExecutionData } from '../entities/ExecutionData'; @Service() diff --git a/packages/cli/src/databases/repositories/executionMetadata.repository.ts b/packages/cli/src/databases/repositories/executionMetadata.repository.ts index 917ce755c81c0..d80cca12a6115 100644 --- a/packages/cli/src/databases/repositories/executionMetadata.repository.ts +++ b/packages/cli/src/databases/repositories/executionMetadata.repository.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { DataSource, Repository } from 'typeorm'; +import { DataSource, Repository } from '@n8n/typeorm'; import { ExecutionMetadata } from '../entities/ExecutionMetadata'; @Service() diff --git a/packages/cli/src/databases/repositories/installedNodes.repository.ts b/packages/cli/src/databases/repositories/installedNodes.repository.ts index 021535c31fd88..6750f5dc84592 100644 --- a/packages/cli/src/databases/repositories/installedNodes.repository.ts +++ b/packages/cli/src/databases/repositories/installedNodes.repository.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { DataSource, Repository } from 'typeorm'; +import { DataSource, Repository } from '@n8n/typeorm'; import { InstalledNodes } from '../entities/InstalledNodes'; @Service() diff --git a/packages/cli/src/databases/repositories/installedPackages.repository.ts b/packages/cli/src/databases/repositories/installedPackages.repository.ts index 743528e35fcf9..4dd4baaed5305 100644 --- a/packages/cli/src/databases/repositories/installedPackages.repository.ts +++ b/packages/cli/src/databases/repositories/installedPackages.repository.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { DataSource, Repository } from 'typeorm'; +import { DataSource, Repository } from '@n8n/typeorm'; import { InstalledPackages } from '../entities/InstalledPackages'; import { InstalledNodesRepository } from './installedNodes.repository'; import type { PackageDirectoryLoader } from 'n8n-core'; diff --git a/packages/cli/src/databases/repositories/settings.repository.ts b/packages/cli/src/databases/repositories/settings.repository.ts index a213ee78a3018..937b56b2c4300 100644 --- a/packages/cli/src/databases/repositories/settings.repository.ts +++ b/packages/cli/src/databases/repositories/settings.repository.ts @@ -1,6 +1,6 @@ import { EXTERNAL_SECRETS_DB_KEY } from '@/ExternalSecrets/constants'; import { Service } from 'typedi'; -import { DataSource, Repository } from 'typeorm'; +import { DataSource, Repository } from '@n8n/typeorm'; import { ErrorReporterProxy as ErrorReporter } from 'n8n-workflow'; import { Settings } from '../entities/Settings'; import config from '@/config'; diff --git a/packages/cli/src/databases/repositories/sharedCredentials.repository.ts b/packages/cli/src/databases/repositories/sharedCredentials.repository.ts index 0a52f521538d5..f6e5b1946a199 100644 --- a/packages/cli/src/databases/repositories/sharedCredentials.repository.ts +++ b/packages/cli/src/databases/repositories/sharedCredentials.repository.ts @@ -1,7 +1,7 @@ import { Service } from 'typedi'; 
-import type { EntityManager } from 'typeorm'; -import { DataSource, In, Not, Repository } from 'typeorm'; -import { SharedCredentials } from '../entities/SharedCredentials'; +import type { EntityManager } from '@n8n/typeorm'; +import { DataSource, In, Not, Repository } from '@n8n/typeorm'; +import { type CredentialSharingRole, SharedCredentials } from '../entities/SharedCredentials'; import type { User } from '../entities/User'; @Service() @@ -36,27 +36,27 @@ export class SharedCredentialsRepository extends Repository { return await this.update({ userId: Not(user.id), role: 'credential:owner' }, { user }); } - /** - * Get the IDs of all credentials owned by or shared with a user. - */ - async getAccessibleCredentials(userId: string) { - const sharings = await this.find({ - where: { - userId, - role: In(['credential:owner', 'credential:user']), - }, - }); + /** Get the IDs of all credentials owned by a user */ + async getOwnedCredentialIds(userIds: string[]) { + return await this.getCredentialIdsByUserAndRole(userIds, ['credential:owner']); + } - return sharings.map((s) => s.credentialsId); + /** Get the IDs of all credentials owned by or shared with a user */ + async getAccessibleCredentialIds(userIds: string[]) { + return await this.getCredentialIdsByUserAndRole(userIds, [ + 'credential:owner', + 'credential:user', + ]); } - async findOwnedSharings(userIds: string[]) { - return await this.find({ + private async getCredentialIdsByUserAndRole(userIds: string[], roles: CredentialSharingRole[]) { + const sharings = await this.find({ where: { userId: In(userIds), - role: 'credential:owner', + role: In(roles), }, }); + return sharings.map((s) => s.credentialsId); } async deleteByIds(transaction: EntityManager, sharedCredentialsIds: string[], user?: User) { diff --git a/packages/cli/src/databases/repositories/sharedWorkflow.repository.ts b/packages/cli/src/databases/repositories/sharedWorkflow.repository.ts index e3d321cab4509..3716daa45ecad 100644 --- a/packages/cli/src/databases/repositories/sharedWorkflow.repository.ts +++ b/packages/cli/src/databases/repositories/sharedWorkflow.repository.ts @@ -1,6 +1,6 @@ import { Service } from 'typedi'; -import { DataSource, Repository, In, Not } from 'typeorm'; -import type { EntityManager, FindManyOptions, FindOptionsWhere } from 'typeorm'; +import { DataSource, Repository, In, Not } from '@n8n/typeorm'; +import type { EntityManager, FindManyOptions, FindOptionsWhere } from '@n8n/typeorm'; import { SharedWorkflow, type WorkflowSharingRole } from '../entities/SharedWorkflow'; import { type User } from '../entities/User'; import type { Scope } from '@n8n/permissions'; @@ -22,6 +22,15 @@ export class SharedWorkflowRepository extends Repository { return await this.exist({ where }); } + /** Get the IDs of all users this workflow is shared with */ + async getSharedUserIds(workflowId: string) { + const sharedWorkflows = await this.find({ + select: ['userId'], + where: { workflowId }, + }); + return sharedWorkflows.map((sharing) => sharing.userId); + } + async getSharedWorkflowIds(workflowIds: string[]) { const sharedWorkflows = await this.find({ select: ['workflowId'], diff --git a/packages/cli/src/databases/repositories/tag.repository.ts b/packages/cli/src/databases/repositories/tag.repository.ts index ff41a28b03315..1667ff29839f9 100644 --- a/packages/cli/src/databases/repositories/tag.repository.ts +++ b/packages/cli/src/databases/repositories/tag.repository.ts @@ -1,6 +1,6 @@ import { Service } from 'typedi'; -import type { EntityManager } from 
'typeorm'; -import { DataSource, In, Repository } from 'typeorm'; +import type { EntityManager } from '@n8n/typeorm'; +import { DataSource, In, Repository } from '@n8n/typeorm'; import { TagEntity } from '../entities/TagEntity'; import type { WorkflowEntity } from '../entities/WorkflowEntity'; import intersection from 'lodash/intersection'; diff --git a/packages/cli/src/databases/repositories/usageMetrics.repository.ts b/packages/cli/src/databases/repositories/usageMetrics.repository.ts index 23f9ef34dc3dd..b0d88fc0a6cc1 100644 --- a/packages/cli/src/databases/repositories/usageMetrics.repository.ts +++ b/packages/cli/src/databases/repositories/usageMetrics.repository.ts @@ -1,6 +1,6 @@ import config from '@/config'; import { Service } from 'typedi'; -import { DataSource, Repository, Entity } from 'typeorm'; +import { DataSource, Repository, Entity } from '@n8n/typeorm'; @Entity() export class UsageMetrics {} diff --git a/packages/cli/src/databases/repositories/user.repository.ts b/packages/cli/src/databases/repositories/user.repository.ts index fd0039bb6c209..6b81f8984bff0 100644 --- a/packages/cli/src/databases/repositories/user.repository.ts +++ b/packages/cli/src/databases/repositories/user.repository.ts @@ -1,6 +1,6 @@ import { Service } from 'typedi'; -import type { EntityManager, FindManyOptions } from 'typeorm'; -import { DataSource, In, IsNull, Not, Repository } from 'typeorm'; +import type { EntityManager, FindManyOptions } from '@n8n/typeorm'; +import { DataSource, In, IsNull, Not, Repository } from '@n8n/typeorm'; import type { ListQuery } from '@/requests'; import { type GlobalRole, User } from '../entities/User'; diff --git a/packages/cli/src/databases/repositories/variables.repository.ts b/packages/cli/src/databases/repositories/variables.repository.ts index d787a8b98431e..e2a321ce576af 100644 --- a/packages/cli/src/databases/repositories/variables.repository.ts +++ b/packages/cli/src/databases/repositories/variables.repository.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { DataSource, Repository } from 'typeorm'; +import { DataSource, Repository } from '@n8n/typeorm'; import { Variables } from '../entities/Variables'; @Service() diff --git a/packages/cli/src/databases/repositories/webhook.repository.ts b/packages/cli/src/databases/repositories/webhook.repository.ts index 64bb49a643ecb..9f211ec0ad746 100644 --- a/packages/cli/src/databases/repositories/webhook.repository.ts +++ b/packages/cli/src/databases/repositories/webhook.repository.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { DataSource, Repository } from 'typeorm'; +import { DataSource, Repository } from '@n8n/typeorm'; import { WebhookEntity } from '../entities/WebhookEntity'; @Service() diff --git a/packages/cli/src/databases/repositories/workflow.repository.ts b/packages/cli/src/databases/repositories/workflow.repository.ts index addec8802cf65..3331b92f96bee 100644 --- a/packages/cli/src/databases/repositories/workflow.repository.ts +++ b/packages/cli/src/databases/repositories/workflow.repository.ts @@ -11,7 +11,7 @@ import { type EntityManager, type DeleteResult, Not, -} from 'typeorm'; +} from '@n8n/typeorm'; import type { ListQuery } from '@/requests'; import { isStringArray } from '@/utils'; import config from '@/config'; diff --git a/packages/cli/src/databases/repositories/workflowHistory.repository.ts b/packages/cli/src/databases/repositories/workflowHistory.repository.ts index bc21b3dca7d2f..4eb05b60fed1d 100644 --- 
a/packages/cli/src/databases/repositories/workflowHistory.repository.ts +++ b/packages/cli/src/databases/repositories/workflowHistory.repository.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { DataSource, LessThan, Repository } from 'typeorm'; +import { DataSource, LessThan, Repository } from '@n8n/typeorm'; import { WorkflowHistory } from '../entities/WorkflowHistory'; @Service() diff --git a/packages/cli/src/databases/repositories/workflowStatistics.repository.ts b/packages/cli/src/databases/repositories/workflowStatistics.repository.ts index 67601c00f1c40..0faef01840aec 100644 --- a/packages/cli/src/databases/repositories/workflowStatistics.repository.ts +++ b/packages/cli/src/databases/repositories/workflowStatistics.repository.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { DataSource, QueryFailedError, Repository } from 'typeorm'; +import { DataSource, QueryFailedError, Repository } from '@n8n/typeorm'; import config from '@/config'; import { StatisticsNames, WorkflowStatistics } from '../entities/WorkflowStatistics'; import type { User } from '@/databases/entities/User'; diff --git a/packages/cli/src/databases/repositories/workflowTagMapping.repository.ts b/packages/cli/src/databases/repositories/workflowTagMapping.repository.ts index c3a45e862459f..6bf4aa6b2a80e 100644 --- a/packages/cli/src/databases/repositories/workflowTagMapping.repository.ts +++ b/packages/cli/src/databases/repositories/workflowTagMapping.repository.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { DataSource, Repository } from 'typeorm'; +import { DataSource, Repository } from '@n8n/typeorm'; import { WorkflowTagMapping } from '../entities/WorkflowTagMapping'; @Service() @@ -7,4 +7,14 @@ export class WorkflowTagMappingRepository extends Repository constructor(dataSource: DataSource) { super(WorkflowTagMapping, dataSource.manager); } + + async overwriteTaggings(workflowId: string, tagIds: string[]) { + return await this.manager.transaction(async (tx) => { + await tx.delete(WorkflowTagMapping, { workflowId }); + + const taggings = tagIds.map((tagId) => this.create({ workflowId, tagId })); + + return await tx.insert(WorkflowTagMapping, taggings); + }); + } } diff --git a/packages/cli/src/databases/repositories/workflowWithVersion.repository.ts b/packages/cli/src/databases/repositories/workflowWithVersion.repository.ts index 371094fc4c023..3c0eb6105cac2 100644 --- a/packages/cli/src/databases/repositories/workflowWithVersion.repository.ts +++ b/packages/cli/src/databases/repositories/workflowWithVersion.repository.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { DataSource, Repository } from 'typeorm'; +import { DataSource, Repository } from '@n8n/typeorm'; import { WorkflowEntityWithVersion } from '../entities/WorkflowEntityWithVersion'; @Service() diff --git a/packages/cli/src/databases/types.ts b/packages/cli/src/databases/types.ts index 31f9e64bfbde7..3f030c009e5b1 100644 --- a/packages/cli/src/databases/types.ts +++ b/packages/cli/src/databases/types.ts @@ -1,5 +1,5 @@ import type { INodeTypes } from 'n8n-workflow'; -import type { QueryRunner, ObjectLiteral } from 'typeorm'; +import type { QueryRunner, ObjectLiteral } from '@n8n/typeorm'; import type { Logger } from '@/Logger'; import type { createSchemaBuilder } from './dsl'; @@ -61,4 +61,4 @@ export interface Migration extends Function { export type InsertResult = Array<{ insertId: number }>; -export { QueryFailedError } from 'typeorm/error/QueryFailedError'; +export { QueryFailedError } from 
'@n8n/typeorm/error/QueryFailedError'; diff --git a/packages/cli/src/databases/utils/commandHelpers.ts b/packages/cli/src/databases/utils/commandHelpers.ts index 775d6cb6edcaf..f522da354e1a7 100644 --- a/packages/cli/src/databases/utils/commandHelpers.ts +++ b/packages/cli/src/databases/utils/commandHelpers.ts @@ -1,6 +1,6 @@ import type { WorkflowEntity } from '@db/entities/WorkflowEntity'; import type { CredentialsEntity } from '@db/entities/CredentialsEntity'; -import { getMetadataArgsStorage } from 'typeorm'; +import { getMetadataArgsStorage } from '@n8n/typeorm'; export const disableAutoGeneratedIds = ( entityClass: typeof WorkflowEntity | typeof CredentialsEntity, diff --git a/packages/cli/src/databases/utils/migrationHelpers.ts b/packages/cli/src/databases/utils/migrationHelpers.ts index 6b164c5797d84..0e5345d8b31d4 100644 --- a/packages/cli/src/databases/utils/migrationHelpers.ts +++ b/packages/cli/src/databases/utils/migrationHelpers.ts @@ -1,8 +1,8 @@ import { Container } from 'typedi'; import { readFileSync, rmSync } from 'fs'; import { InstanceSettings } from 'n8n-core'; -import type { ObjectLiteral } from 'typeorm'; -import type { QueryRunner } from 'typeorm/query-runner/QueryRunner'; +import type { ObjectLiteral } from '@n8n/typeorm'; +import type { QueryRunner } from '@n8n/typeorm/query-runner/QueryRunner'; import { ApplicationError, jsonParse } from 'n8n-workflow'; import config from '@/config'; import { inTest } from '@/constants'; diff --git a/packages/cli/src/databases/utils/transformers.ts b/packages/cli/src/databases/utils/transformers.ts index df084889565df..2ddd784c750c4 100644 --- a/packages/cli/src/databases/utils/transformers.ts +++ b/packages/cli/src/databases/utils/transformers.ts @@ -1,5 +1,5 @@ import { jsonParse } from 'n8n-workflow'; -import type { ValueTransformer, FindOperator } from 'typeorm'; +import type { ValueTransformer, FindOperator } from '@n8n/typeorm'; import config from '@/config'; export const idStringifier = { diff --git a/packages/cli/src/environments/sourceControl/sourceControlImport.service.ee.ts b/packages/cli/src/environments/sourceControl/sourceControlImport.service.ee.ts index 2fe8525996d93..e4f464d322787 100644 --- a/packages/cli/src/environments/sourceControl/sourceControlImport.service.ee.ts +++ b/packages/cli/src/environments/sourceControl/sourceControlImport.service.ee.ts @@ -8,7 +8,7 @@ import { SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER, } from './constants'; import glob from 'fast-glob'; -import { ApplicationError, jsonParse } from 'n8n-workflow'; +import { ApplicationError, jsonParse, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow'; import { readFile as fsReadFile } from 'fs/promises'; import { Credentials, InstanceSettings } from 'n8n-core'; import type { IWorkflowToImport } from '@/Interfaces'; @@ -18,7 +18,7 @@ import { SharedCredentials } from '@db/entities/SharedCredentials'; import type { WorkflowTagMapping } from '@db/entities/WorkflowTagMapping'; import type { TagEntity } from '@db/entities/TagEntity'; import { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; -import { In } from 'typeorm'; +import { In } from '@n8n/typeorm'; import { isUniqueConstraintError } from '@/ResponseHelper'; import type { SourceControlWorkflowVersionId } from './types/sourceControlWorkflowVersionId'; import { getCredentialExportPath, getWorkflowExportPath } from './sourceControlHelper.ee'; @@ -87,14 +87,28 @@ export class SourceControlImportService { const localWorkflows = await Container.get(WorkflowRepository).find({ select: ['id', 
'name', 'versionId', 'updatedAt'], }); - return localWorkflows.map((local) => ({ - id: local.id, - versionId: local.versionId, - name: local.name, - localId: local.id, - filename: getWorkflowExportPath(local.id, this.workflowExportFolder), - updatedAt: local.updatedAt.toISOString(), - })) as SourceControlWorkflowVersionId[]; + return localWorkflows.map((local) => { + let updatedAt: Date; + if (local.updatedAt instanceof Date) { + updatedAt = local.updatedAt; + } else { + ErrorReporter.warn('updatedAt is not a Date', { + extra: { + type: typeof local.updatedAt, + value: local.updatedAt, + }, + }); + updatedAt = isNaN(Date.parse(local.updatedAt)) ? new Date() : new Date(local.updatedAt); + } + return { + id: local.id, + versionId: local.versionId, + name: local.name, + localId: local.id, + filename: getWorkflowExportPath(local.id, this.workflowExportFolder), + updatedAt: updatedAt.toISOString(), + }; + }) as SourceControlWorkflowVersionId[]; } public async getRemoteCredentialsFromFiles(): Promise< diff --git a/packages/cli/src/eventbus/MessageEventBus/MessageEventBus.ts b/packages/cli/src/eventbus/MessageEventBus/MessageEventBus.ts index e6784327c4fe6..6c897cb38d4e6 100644 --- a/packages/cli/src/eventbus/MessageEventBus/MessageEventBus.ts +++ b/packages/cli/src/eventbus/MessageEventBus/MessageEventBus.ts @@ -1,6 +1,6 @@ import { Service } from 'typedi'; -import type { DeleteResult } from 'typeorm'; -import { In } from 'typeorm'; +import type { DeleteResult } from '@n8n/typeorm'; +import { In } from '@n8n/typeorm'; import EventEmitter from 'events'; import uniqby from 'lodash/uniqBy'; import { jsonParse } from 'n8n-workflow'; diff --git a/packages/cli/src/eventbus/index.ts b/packages/cli/src/eventbus/index.ts index 7118b57bd2464..b9a271bb81f18 100644 --- a/packages/cli/src/eventbus/index.ts +++ b/packages/cli/src/eventbus/index.ts @@ -1,4 +1,3 @@ -export { MessageEventBus } from './MessageEventBus/MessageEventBus'; export { EventMessageTypes } from './EventMessageClasses'; export { EventPayloadWorkflow } from './EventMessageClasses/EventMessageWorkflow'; export { METRICS_EVENT_NAME, getLabelsForEvent } from './MessageEventBusDestination/Helpers.ee'; diff --git a/packages/cli/src/requests.ts b/packages/cli/src/requests.ts index 868f5402a8b8a..173f04eec0908 100644 --- a/packages/cli/src/requests.ts +++ b/packages/cli/src/requests.ts @@ -161,7 +161,7 @@ export declare namespace CredentialRequest { type Test = AuthenticatedRequest<{}, {}, INodeCredentialTestRequest>; - type Share = AuthenticatedRequest<{ credentialId: string }, {}, { shareWithIds: string[] }>; + type Share = AuthenticatedRequest<{ id: string }, {}, { shareWithIds: string[] }>; } // ---------------------------------- diff --git a/packages/cli/src/services/credentials-tester.service.ts b/packages/cli/src/services/credentials-tester.service.ts new file mode 100644 index 0000000000000..59b4abb5db581 --- /dev/null +++ b/packages/cli/src/services/credentials-tester.service.ts @@ -0,0 +1,382 @@ +/* eslint-disable @typescript-eslint/no-unsafe-argument */ +/* eslint-disable @typescript-eslint/no-unsafe-member-access */ +/* eslint-disable @typescript-eslint/no-unsafe-assignment */ +/* eslint-disable @typescript-eslint/no-unsafe-return */ +/* eslint-disable @typescript-eslint/no-unsafe-call */ +import { Service } from 'typedi'; +import { NodeExecuteFunctions } from 'n8n-core'; +import get from 'lodash/get'; + +import type { + ICredentialsDecrypted, + ICredentialTestFunction, + ICredentialTestRequestData, + INode, + 
INodeCredentialTestResult, + INodeExecutionData, + INodeProperties, + INodeType, + IVersionedNodeType, + IRunExecutionData, + WorkflowExecuteMode, + ITaskDataConnections, + INodeTypeData, + INodeTypes, + ICredentialTestFunctions, +} from 'n8n-workflow'; +import { + VersionedNodeType, + NodeHelpers, + RoutingNode, + Workflow, + ErrorReporterProxy as ErrorReporter, + ApplicationError, +} from 'n8n-workflow'; + +import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData'; +import type { User } from '@db/entities/User'; +import { NodeTypes } from '@/NodeTypes'; +import { CredentialTypes } from '@/CredentialTypes'; +import { RESPONSE_ERROR_MESSAGES } from '../constants'; +import { isObjectLiteral } from '../utils'; +import { Logger } from '@/Logger'; +import { CredentialsHelper } from '../CredentialsHelper'; + +const { OAUTH2_CREDENTIAL_TEST_SUCCEEDED, OAUTH2_CREDENTIAL_TEST_FAILED } = RESPONSE_ERROR_MESSAGES; + +const mockNodesData: INodeTypeData = { + mock: { + sourcePath: '', + type: { + description: { properties: [] as INodeProperties[] }, + } as INodeType, + }, +}; + +const mockNodeTypes: INodeTypes = { + getByName(nodeType: string): INodeType | IVersionedNodeType { + return mockNodesData[nodeType]?.type; + }, + getByNameAndVersion(nodeType: string, version?: number): INodeType { + if (!mockNodesData[nodeType]) { + throw new ApplicationError(RESPONSE_ERROR_MESSAGES.NO_NODE, { + tags: { nodeType }, + }); + } + return NodeHelpers.getVersionedNodeType(mockNodesData[nodeType].type, version); + }, +}; + +@Service() +export class CredentialsTester { + constructor( + private readonly logger: Logger, + private readonly credentialTypes: CredentialTypes, + private readonly nodeTypes: NodeTypes, + private readonly credentialsHelper: CredentialsHelper, + ) {} + + private static hasAccessToken(credentialsDecrypted: ICredentialsDecrypted) { + const oauthTokenData = credentialsDecrypted?.data?.oauthTokenData; + + if (!isObjectLiteral(oauthTokenData)) return false; + + return 'access_token' in oauthTokenData; + } + + private getCredentialTestFunction( + credentialType: string, + ): ICredentialTestFunction | ICredentialTestRequestData | undefined { + // Check if test is defined on credentials + const type = this.credentialTypes.getByName(credentialType); + if (type.test) { + return { + testRequest: type.test, + }; + } + + const supportedNodes = this.credentialTypes.getSupportedNodes(credentialType); + for (const nodeName of supportedNodes) { + const node = this.nodeTypes.getByName(nodeName); + + // Always use an array, even if the node is not versioned, to avoid + // duplicating the logic + const allNodeTypes: INodeType[] = []; + if (node instanceof VersionedNodeType) { + // Node is versioned + allNodeTypes.push(...Object.values(node.nodeVersions)); + } else { + // Node is not versioned + allNodeTypes.push(node as INodeType); + } + + // Check each of the node versions for credential tests + for (const nodeType of allNodeTypes) { + // Check each of the credentials + for (const { name, testedBy } of nodeType.description.credentials ?? []) { + if ( + name === credentialType && + this.credentialTypes.getParentTypes(name).includes('oAuth2Api') + ) { + return async function oauth2CredTest( + this: ICredentialTestFunctions, + cred: ICredentialsDecrypted, + ): Promise<INodeCredentialTestResult> { + return CredentialsTester.hasAccessToken(cred) + ?
{ + status: 'OK', + message: OAUTH2_CREDENTIAL_TEST_SUCCEEDED, + } + : { + status: 'Error', + message: OAUTH2_CREDENTIAL_TEST_FAILED, + }; + }; + } + + if (name === credentialType && !!testedBy) { + if (typeof testedBy === 'string') { + if (node instanceof VersionedNodeType) { + // The node is versioned. So check all versions for test function + // starting with the latest + const versions = Object.keys(node.nodeVersions).sort().reverse(); + for (const version of versions) { + const versionedNode = node.nodeVersions[parseInt(version, 10)]; + const credentialTest = versionedNode.methods?.credentialTest; + if (credentialTest && testedBy in credentialTest) { + return credentialTest[testedBy]; + } + } + } + // Test is defined as string which links to a function + return (node as unknown as INodeType).methods?.credentialTest![testedBy]; + } + + // Test is defined as JSON with a definition for the request to make + return { + nodeType, + testRequest: testedBy, + }; + } + } + } + } + + return undefined; + } + + async testCredentials( + user: User, + credentialType: string, + credentialsDecrypted: ICredentialsDecrypted, + ): Promise<INodeCredentialTestResult> { + const credentialTestFunction = this.getCredentialTestFunction(credentialType); + if (credentialTestFunction === undefined) { + return { + status: 'Error', + message: 'No testing function found for this credential.', + }; + } + + if (credentialsDecrypted.data) { + try { + const additionalData = await WorkflowExecuteAdditionalData.getBase(user.id); + credentialsDecrypted.data = this.credentialsHelper.applyDefaultsAndOverwrites( + additionalData, + credentialsDecrypted.data, + credentialType, + 'internal' as WorkflowExecuteMode, + undefined, + undefined, + user.hasGlobalScope('externalSecret:use'), + ); + } catch (error) { + this.logger.debug('Credential test failed', error); + return { + status: 'Error', + message: error.message.toString(), + }; + } + } + + if (typeof credentialTestFunction === 'function') { + // The credentials get tested via a function that is defined on the node + const credentialTestFunctions = NodeExecuteFunctions.getCredentialTestFunctions(); + + return credentialTestFunction.call(credentialTestFunctions, credentialsDecrypted); + } + + // Credentials get tested via request instructions + + // TODO: Temp workflows get created at multiple locations (for example also LoadNodeParameterOptions), + // check if some of them are identical enough that it can be combined + + let nodeType: INodeType; + if (credentialTestFunction.nodeType) { + nodeType = credentialTestFunction.nodeType; + } else { + nodeType = this.nodeTypes.getByNameAndVersion('n8n-nodes-base.noOp'); + } + + const node: INode = { + id: 'temp', + parameters: {}, + name: 'Temp-Node', + type: nodeType.description.name, + typeVersion: Array.isArray(nodeType.description.version) + ?
nodeType.description.version.slice(-1)[0] + : nodeType.description.version, + position: [0, 0], + credentials: { + [credentialType]: { + id: credentialsDecrypted.id, + name: credentialsDecrypted.name, + }, + }, + }; + + const workflowData = { + nodes: [node], + connections: {}, + }; + + const nodeTypeCopy: INodeType = { + description: { + ...nodeType.description, + credentials: [ + { + name: credentialType, + required: true, + }, + ], + properties: [ + { + displayName: 'Temp', + name: 'temp', + type: 'string', + routing: { + request: credentialTestFunction.testRequest.request, + }, + default: '', + }, + ], + }, + }; + + mockNodesData[nodeTypeCopy.description.name] = { + sourcePath: '', + type: nodeTypeCopy, + }; + + const workflow = new Workflow({ + nodes: workflowData.nodes, + connections: workflowData.connections, + active: false, + nodeTypes: mockNodeTypes, + }); + + const mode = 'internal'; + const runIndex = 0; + const inputData: ITaskDataConnections = { + main: [[{ json: {} }]], + }; + const connectionInputData: INodeExecutionData[] = []; + const runExecutionData: IRunExecutionData = { + resultData: { + runData: {}, + }, + }; + + const additionalData = await WorkflowExecuteAdditionalData.getBase(user.id, node.parameters); + + const routingNode = new RoutingNode( + workflow, + node, + connectionInputData, + runExecutionData ?? null, + additionalData, + mode, + ); + + let response: INodeExecutionData[][] | null | undefined; + + try { + response = await routingNode.runNode( + inputData, + runIndex, + nodeTypeCopy, + { node, data: {}, source: null }, + NodeExecuteFunctions, + credentialsDecrypted, + ); + } catch (error) { + ErrorReporter.error(error); + // Do not fail any requests to allow custom error messages and + // make logic easier + if (error.cause?.response) { + const errorResponseData = { + statusCode: error.cause.response.status, + statusMessage: error.cause.response.statusText, + }; + if (credentialTestFunction.testRequest.rules) { + // Special testing rules are defined so check all in order + for (const rule of credentialTestFunction.testRequest.rules) { + if (rule.type === 'responseCode') { + if (errorResponseData.statusCode === rule.properties.value) { + return { + status: 'Error', + message: rule.properties.message, + }; + } + } + } + } + + if (errorResponseData.statusCode < 199 || errorResponseData.statusCode > 299) { + // All requests with response codes that are not 2xx are treated by default as failed + return { + status: 'Error', + message: + errorResponseData.statusMessage || + `Received HTTP status code: ${errorResponseData.statusCode}`, + }; + } + } else if (error.cause?.code) { + return { + status: 'Error', + message: error.cause.code, + }; + } + this.logger.debug('Credential test failed', error); + return { + status: 'Error', + message: error.message.toString(), + }; + } finally { + delete mockNodesData[nodeTypeCopy.description.name]; + } + + if ( + credentialTestFunction.testRequest.rules && + Array.isArray(credentialTestFunction.testRequest.rules) + ) { + // Special testing rules are defined so check all in order + for (const rule of credentialTestFunction.testRequest.rules) { + if (rule.type === 'responseSuccessBody') { + const responseData = response![0][0].json; + if (get(responseData, rule.properties.key) === rule.properties.value) { + return { + status: 'Error', + message: rule.properties.message, + }; + } + } + } + } + + return { + status: 'OK', + message: 'Connection successful!', + }; + } +} diff --git a/packages/cli/src/services/metrics.service.ts 
b/packages/cli/src/services/metrics.service.ts index 8c185f39bf0fb..36fb6520f57fb 100644 --- a/packages/cli/src/services/metrics.service.ts +++ b/packages/cli/src/services/metrics.service.ts @@ -8,12 +8,8 @@ import { Service } from 'typedi'; import EventEmitter from 'events'; import { CacheService } from '@/services/cache/cache.service'; -import { - MessageEventBus, - METRICS_EVENT_NAME, - getLabelsForEvent, - type EventMessageTypes, -} from '@/eventbus'; +import { METRICS_EVENT_NAME, getLabelsForEvent, type EventMessageTypes } from '@/eventbus'; +import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; import { Logger } from '@/Logger'; @Service() diff --git a/packages/cli/src/services/orchestration.service.ts b/packages/cli/src/services/orchestration.service.ts index af56955805064..45100bbe2bbb3 100644 --- a/packages/cli/src/services/orchestration.service.ts +++ b/packages/cli/src/services/orchestration.service.ts @@ -6,6 +6,7 @@ import type { RedisServiceBaseCommand, RedisServiceCommand } from './redis/Redis import { RedisService } from './redis.service'; import { MultiMainSetup } from './orchestration/main/MultiMainSetup.ee'; +import type { WorkflowActivateMode } from 'n8n-workflow'; @Service() export class OrchestrationService { @@ -118,4 +119,29 @@ export class OrchestrationService { await this.redisPublisher.publishToCommandChannel({ command }); } + + // ---------------------------------- + // activations + // ---------------------------------- + + /** + * Whether this instance may add webhooks to the `webhook_entity` table. + */ + shouldAddWebhooks(activationMode: WorkflowActivateMode) { + if (activationMode === 'init') return false; + + if (activationMode === 'leadershipChange') return false; + + return this.isLeader; // 'update' or 'activate' + } + + /** + * Whether this instance may add triggers and pollers to memory. + * + * In both single- and multi-main setup, only the leader is allowed to manage + * triggers and pollers in memory, to ensure they are not duplicated. 
+ */ + shouldAddTriggersAndPollers() { + return this.isLeader; + } } diff --git a/packages/cli/src/services/orchestration/main/MultiMainSetup.ee.ts b/packages/cli/src/services/orchestration/main/MultiMainSetup.ee.ts index 070834ac7b674..8d9cd5da23ef2 100644 --- a/packages/cli/src/services/orchestration/main/MultiMainSetup.ee.ts +++ b/packages/cli/src/services/orchestration/main/MultiMainSetup.ee.ts @@ -62,11 +62,9 @@ export class MultiMainSetup extends EventEmitter { if (config.getEnv('multiMainSetup.instanceType') === 'leader') { config.set('multiMainSetup.instanceType', 'follower'); - this.emit('leadershipChange'); // stop triggers, pollers, pruning + this.emit('leader-stepdown'); // lost leadership - stop triggers, pollers, pruning - EventReporter.report('[Multi-main setup] Leader failed to renew leader key', { - level: 'info', - }); + EventReporter.info('[Multi-main setup] Leader failed to renew leader key'); } return; @@ -79,7 +77,7 @@ export class MultiMainSetup extends EventEmitter { config.set('multiMainSetup.instanceType', 'follower'); - this.emit('leadershipVacant'); // stop triggers, pollers, pruning + this.emit('leader-stepdown'); // lost leadership - stop triggers, pollers, pruning await this.tryBecomeLeader(); } @@ -99,7 +97,7 @@ export class MultiMainSetup extends EventEmitter { await this.redisPublisher.setExpiration(this.leaderKey, this.leaderKeyTtl); - this.emit('leadershipChange'); // start triggers, pollers, pruning + this.emit('leader-takeover'); // gained leadership - start triggers, pollers, pruning } else { config.set('multiMainSetup.instanceType', 'follower'); } diff --git a/packages/cli/src/services/orchestration/main/handleCommandMessageMain.ts b/packages/cli/src/services/orchestration/main/handleCommandMessageMain.ts index ddf2c0e7fe72d..02cc5aae0a1ba 100644 --- a/packages/cli/src/services/orchestration/main/handleCommandMessageMain.ts +++ b/packages/cli/src/services/orchestration/main/handleCommandMessageMain.ts @@ -7,24 +7,30 @@ import { License } from '@/License'; import { Logger } from '@/Logger'; import { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; import { Push } from '@/push'; +import { TestWebhooks } from '@/TestWebhooks'; import { OrchestrationService } from '@/services/orchestration.service'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; -import { TestWebhooks } from '@/TestWebhooks'; export async function handleCommandMessageMain(messageString: string) { const queueModeId = config.getEnv('redis.queueModeId'); const isMainInstance = config.getEnv('generic.instanceType') === 'main'; const message = messageToRedisServiceCommandObject(messageString); const logger = Container.get(Logger); - const activeWorkflowRunner = Container.get(ActiveWorkflowRunner); if (message) { logger.debug( `RedisCommandHandler(main): Received command message ${message.command} from ${message.senderId}`, ); + + const selfSendingAllowed = [ + 'add-webhooks-triggers-and-pollers', + 'remove-triggers-and-pollers', + ].includes(message.command); + if ( - message.senderId === queueModeId || - (message.targets && !message.targets.includes(queueModeId)) + !selfSendingAllowed && + (message.senderId === queueModeId || + (message.targets && !message.targets.includes(queueModeId))) ) { // Skipping command message because it's not for this instance logger.debug( @@ -71,52 +77,106 @@ export async function handleCommandMessageMain(messageString: string) { await Container.get(ExternalSecretsManager).reloadAllProviders(); break; - case 
'workflowActiveStateChanged': { + case 'add-webhooks-triggers-and-pollers': { if (!debounceMessageReceiver(message, 100)) { message.payload = { result: 'debounced' }; return message; } - const { workflowId, oldState, newState, versionId } = message.payload ?? {}; + const orchestrationService = Container.get(OrchestrationService); - if ( - typeof workflowId !== 'string' || - typeof oldState !== 'boolean' || - typeof newState !== 'boolean' || - typeof versionId !== 'string' - ) { - break; - } + if (orchestrationService.isFollower) break; + + if (typeof message.payload?.workflowId !== 'string') break; + + const { workflowId } = message.payload; - if (!oldState && newState) { - try { - await activeWorkflowRunner.add(workflowId, 'activate'); - push.broadcast('workflowActivated', { workflowId }); - } catch (e) { - const error = e instanceof Error ? e : new Error(`${e}`); + try { + await Container.get(ActiveWorkflowRunner).add(workflowId, 'activate', undefined, { + shouldPublish: false, // prevent leader re-publishing message + }); - await Container.get(WorkflowRepository).update(workflowId, { - active: false, - versionId, + push.broadcast('workflowActivated', { workflowId }); + + // instruct followers to show activation in UI + await orchestrationService.publish('display-workflow-activation', { workflowId }); + } catch (error) { + if (error instanceof Error) { + await Container.get(WorkflowRepository).update(workflowId, { active: false }); + + Container.get(Push).broadcast('workflowFailedToActivate', { + workflowId, + errorMessage: error.message, }); - await Container.get(OrchestrationService).publish('workflowFailedToActivate', { + await Container.get(OrchestrationService).publish('workflow-failed-to-activate', { workflowId, errorMessage: error.message, }); } - } else if (oldState && !newState) { - await activeWorkflowRunner.remove(workflowId); - push.broadcast('workflowDeactivated', { workflowId }); - } else { - await activeWorkflowRunner.remove(workflowId); - await activeWorkflowRunner.add(workflowId, 'update'); } + break; + } + + case 'remove-triggers-and-pollers': { + if (!debounceMessageReceiver(message, 100)) { + message.payload = { result: 'debounced' }; + return message; + } + + const orchestrationService = Container.get(OrchestrationService); + + if (orchestrationService.isFollower) break; + + if (typeof message.payload?.workflowId !== 'string') break; + + const { workflowId } = message.payload; + + const activeWorkflowRunner = Container.get(ActiveWorkflowRunner); + await activeWorkflowRunner.removeActivationError(workflowId); + await activeWorkflowRunner.removeWorkflowTriggersAndPollers(workflowId); + + push.broadcast('workflowDeactivated', { workflowId }); + + // instruct followers to show workflow deactivation in UI + await orchestrationService.publish('display-workflow-deactivation', { workflowId }); + + break; + } + + case 'display-workflow-activation': { + if (!debounceMessageReceiver(message, 100)) { + message.payload = { result: 'debounced' }; + return message; + } + + const { workflowId } = message.payload ?? {}; + + if (typeof workflowId !== 'string') break; + + push.broadcast('workflowActivated', { workflowId }); + + break; + } + + case 'display-workflow-deactivation': { + if (!debounceMessageReceiver(message, 100)) { + message.payload = { result: 'debounced' }; + return message; + } + + const { workflowId } = message.payload ?? 
{}; + + if (typeof workflowId !== 'string') break; + + push.broadcast('workflowDeactivated', { workflowId }); + + break; + } - case 'workflowFailedToActivate': { + case 'workflow-failed-to-activate': { if (!debounceMessageReceiver(message, 100)) { message.payload = { result: 'debounced' }; return message; diff --git a/packages/cli/src/services/orchestration/webhook/handleCommandMessageWebhook.ts b/packages/cli/src/services/orchestration/webhook/handleCommandMessageWebhook.ts index 3be43ae835837..9dc326978d803 100644 --- a/packages/cli/src/services/orchestration/webhook/handleCommandMessageWebhook.ts +++ b/packages/cli/src/services/orchestration/webhook/handleCommandMessageWebhook.ts @@ -1,6 +1,75 @@ -import { handleCommandMessageMain } from '../main/handleCommandMessageMain'; +import { ExternalSecretsManager } from '@/ExternalSecrets/ExternalSecretsManager.ee'; +import { License } from '@/License'; +import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; +import Container from 'typedi'; +import { Logger } from 'winston'; +import { messageToRedisServiceCommandObject, debounceMessageReceiver } from '../helpers'; +import config from '@/config'; export async function handleCommandMessageWebhook(messageString: string) { - // currently webhooks handle commands the same way as the main instance - return await handleCommandMessageMain(messageString); + const queueModeId = config.getEnv('redis.queueModeId'); + const isMainInstance = config.getEnv('generic.instanceType') === 'main'; + const message = messageToRedisServiceCommandObject(messageString); + const logger = Container.get(Logger); + + if (message) { + logger.debug( + `RedisCommandHandler(main): Received command message ${message.command} from ${message.senderId}`, + ); + + if ( + message.senderId === queueModeId || + (message.targets && !message.targets.includes(queueModeId)) + ) { + // Skipping command message because it's not for this instance + logger.debug( + `Skipping command message ${message.command} because it's not for this instance.`, + ); + return message; + } + + switch (message.command) { + case 'reloadLicense': + if (!debounceMessageReceiver(message, 500)) { + message.payload = { + result: 'debounced', + }; + return message; + } + + if (isMainInstance && !config.getEnv('multiMainSetup.enabled')) { + // at this point in time, only a single main instance is supported, thus this command _should_ never be caught currently + logger.error( + 'Received command to reload license via Redis, but this should not have happened and is not supported on the main instance yet.', + ); + return message; + } + await Container.get(License).reload(); + break; + case 'restartEventBus': + if (!debounceMessageReceiver(message, 200)) { + message.payload = { + result: 'debounced', + }; + return message; + } + await Container.get(MessageEventBus).restart(); + break; + case 'reloadExternalSecretsProviders': + if (!debounceMessageReceiver(message, 200)) { + message.payload = { + result: 'debounced', + }; + return message; + } + await Container.get(ExternalSecretsManager).reloadAllProviders(); + break; + + default: + break; + } + + return message; + } + + return; } diff --git a/packages/cli/src/services/password.utility.ts b/packages/cli/src/services/password.utility.ts index 314a3fef71b4c..8c8c3aaf1964c 100644 --- a/packages/cli/src/services/password.utility.ts +++ b/packages/cli/src/services/password.utility.ts @@ -1,18 +1,17 @@ import { BadRequestError } from '@/errors/response-errors/bad-request.error'; import { Service as Utility } from
'typedi'; -import { compare, genSaltSync, hash } from 'bcryptjs'; +import { compare, hash } from 'bcryptjs'; import { MAX_PASSWORD_CHAR_LENGTH as maxLength, MIN_PASSWORD_CHAR_LENGTH as minLength, } from '@/constants'; +const SALT_ROUNDS = 10; + @Utility() export class PasswordUtility { async hash(plaintext: string) { - const SALT_ROUNDS = 10; - const salt = genSaltSync(SALT_ROUNDS); - - return await hash(plaintext, salt); + return await hash(plaintext, SALT_ROUNDS); } async compare(plaintext: string, hashed: string) { diff --git a/packages/cli/src/services/redis/RedisServiceCommands.ts b/packages/cli/src/services/redis/RedisServiceCommands.ts index e1c20d71a6f3b..b7c15ac0ef265 100644 --- a/packages/cli/src/services/redis/RedisServiceCommands.ts +++ b/packages/cli/src/services/redis/RedisServiceCommands.ts @@ -7,8 +7,11 @@ export type RedisServiceCommand = | 'stopWorker' | 'reloadLicense' | 'reloadExternalSecretsProviders' - | 'workflowActiveStateChanged' // multi-main only - | 'workflowFailedToActivate' // multi-main only + | 'display-workflow-activation' // multi-main only + | 'display-workflow-deactivation' // multi-main only + | 'add-webhooks-triggers-and-pollers' // multi-main only + | 'remove-triggers-and-pollers' // multi-main only + | 'workflow-failed-to-activate' // multi-main only | 'relay-execution-lifecycle-event' // multi-main only | 'clear-test-webhooks'; // multi-main only diff --git a/packages/cli/src/services/userOnboarding.service.ts b/packages/cli/src/services/userOnboarding.service.ts index ab8dbb98c13dc..3f61a4aac03f4 100644 --- a/packages/cli/src/services/userOnboarding.service.ts +++ b/packages/cli/src/services/userOnboarding.service.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { In } from 'typeorm'; +import { In } from '@n8n/typeorm'; import type { User } from '@db/entities/User'; import { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.repository'; diff --git a/packages/cli/src/workflows/workflow.service.ee.ts b/packages/cli/src/workflows/workflow.service.ee.ts index 5c96808451fed..a95536d80f86b 100644 --- a/packages/cli/src/workflows/workflow.service.ee.ts +++ b/packages/cli/src/workflows/workflow.service.ee.ts @@ -24,6 +24,7 @@ export class EnterpriseWorkflowService { private readonly sharedWorkflowRepository: SharedWorkflowRepository, private readonly workflowRepository: WorkflowRepository, private readonly credentialsRepository: CredentialsRepository, + private readonly credentialsService: CredentialsService, ) {} async isOwned( @@ -70,7 +71,7 @@ export class EnterpriseWorkflowService { currentUser: User, ): Promise { workflow.usedCredentials = []; - const userCredentials = await CredentialsService.getMany(currentUser, { onlyOwn: true }); + const userCredentials = await this.credentialsService.getMany(currentUser, { onlyOwn: true }); const credentialIdsUsedByWorkflow = new Set(); workflow.nodes.forEach((node) => { if (!node.credentials) { @@ -139,7 +140,7 @@ export class EnterpriseWorkflowService { throw new NotFoundError('Workflow not found'); } - const allCredentials = await CredentialsService.getMany(user); + const allCredentials = await this.credentialsService.getMany(user); try { return this.validateWorkflowCredentialUsage(workflow, previousVersion, allCredentials); diff --git a/packages/cli/src/workflows/workflow.service.ts b/packages/cli/src/workflows/workflow.service.ts index 3833510b75b0e..5cf6d6974efba 100644 --- a/packages/cli/src/workflows/workflow.service.ts +++ b/packages/cli/src/workflows/workflow.service.ts @@ 
-80,8 +80,6 @@ export class WorkflowService { ); } - const oldState = shared.workflow.active; - if ( !forceSave && workflow.versionId !== '' && @@ -166,10 +164,7 @@ export class WorkflowService { ); if (tagIds && !config.getEnv('workflowTagsDisabled')) { - await this.workflowTagMappingRepository.delete({ workflowId }); - await this.workflowTagMappingRepository.insert( - tagIds.map((tagId) => ({ tagId, workflowId })), - ); + await this.workflowTagMappingRepository.overwriteTaggings(workflowId, tagIds); } if (workflow.versionId !== shared.workflow.versionId) { @@ -230,17 +225,6 @@ export class WorkflowService { await this.orchestrationService.init(); - const newState = updatedWorkflow.active; - - if (this.orchestrationService.isMultiMainSetupEnabled && oldState !== newState) { - await this.orchestrationService.publish('workflowActiveStateChanged', { - workflowId, - oldState, - newState, - versionId: shared.workflow.versionId, - }); - } - return updatedWorkflow; } diff --git a/packages/cli/src/workflows/workflowSharing.service.ts b/packages/cli/src/workflows/workflowSharing.service.ts index 78f325c719bcb..93df8e0acaf2e 100644 --- a/packages/cli/src/workflows/workflowSharing.service.ts +++ b/packages/cli/src/workflows/workflowSharing.service.ts @@ -1,5 +1,5 @@ import { Service } from 'typedi'; -import { In, type FindOptionsWhere } from 'typeorm'; +import { In, type FindOptionsWhere } from '@n8n/typeorm'; import type { SharedWorkflow, WorkflowSharingRole } from '@db/entities/SharedWorkflow'; import type { User } from '@db/entities/User'; diff --git a/packages/cli/src/workflows/workflows.controller.ts b/packages/cli/src/workflows/workflows.controller.ts index d3bf076ea163b..9926d47f9e7ad 100644 --- a/packages/cli/src/workflows/workflows.controller.ts +++ b/packages/cli/src/workflows/workflows.controller.ts @@ -1,4 +1,3 @@ -import { Service } from 'typedi'; import express from 'express'; import { v4 as uuid } from 'uuid'; import axios from 'axios'; @@ -40,7 +39,6 @@ import { WorkflowExecutionService } from './workflowExecution.service'; import { WorkflowSharingService } from './workflowSharing.service'; import { UserManagementMailer } from '@/UserManagement/email'; -@Service() @Authorized() @RestController('/workflows') export class WorkflowsController { @@ -62,6 +60,7 @@ export class WorkflowsController { private readonly userRepository: UserRepository, private readonly license: License, private readonly mailer: UserManagementMailer, + private readonly credentialsService: CredentialsService, ) {} @Post('/') @@ -92,7 +91,7 @@ export class WorkflowsController { // This is a new workflow, so we simply check if the user has access to // all used workflows - const allCredentials = await CredentialsService.getMany(req.user); + const allCredentials = await this.credentialsService.getMany(req.user); try { this.enterpriseWorkflowService.validateCredentialPermissionsToUser( diff --git a/packages/cli/test/integration/ActiveWorkflowRunner.test.ts b/packages/cli/test/integration/ActiveWorkflowRunner.test.ts index 4088e00d111d8..571d2dedbd216 100644 --- a/packages/cli/test/integration/ActiveWorkflowRunner.test.ts +++ b/packages/cli/test/integration/ActiveWorkflowRunner.test.ts @@ -1,6 +1,6 @@ import { Container } from 'typedi'; import { NodeApiError, NodeOperationError, Workflow } from 'n8n-workflow'; -import type { IWebhookData, WorkflowActivateMode } from 'n8n-workflow'; +import type { IWebhookData } from 'n8n-workflow'; import { ActiveExecutions } from '@/ActiveExecutions'; import { ActiveWorkflowRunner } 
from '@/ActiveWorkflowRunner'; @@ -14,13 +14,11 @@ import * as AdditionalData from '@/WorkflowExecuteAdditionalData'; import type { User } from '@db/entities/User'; import type { WebhookEntity } from '@db/entities/WebhookEntity'; import { NodeTypes } from '@/NodeTypes'; -import { OrchestrationService } from '@/services/orchestration.service'; import { ExecutionService } from '@/executions/execution.service'; import { WorkflowService } from '@/workflows/workflow.service'; import { ActiveWorkflowsService } from '@/services/activeWorkflows.service'; import { mockInstance } from '../shared/mocking'; -import { chooseRandomly } from './shared/random'; import { setSchedulerAsLoadedNode } from './shared/utils'; import * as testDb from './shared/testDb'; import { createOwner } from './shared/db/users'; @@ -33,11 +31,6 @@ mockInstance(ExecutionService); mockInstance(WorkflowService); const webhookService = mockInstance(WebhookService); -const orchestrationService = mockInstance(OrchestrationService, { - isMultiMainSetupEnabled: false, - isLeader: false, - isFollower: false, -}); setSchedulerAsLoadedNode(); @@ -47,14 +40,6 @@ let activeWorkflowsService: ActiveWorkflowsService; let activeWorkflowRunner: ActiveWorkflowRunner; let owner: User; -const NON_LEADERSHIP_CHANGE_MODES: WorkflowActivateMode[] = [ - 'init', - 'create', - 'update', - 'activate', - 'manual', -]; - beforeAll(async () => { await testDb.init(); @@ -215,113 +200,6 @@ describe('executeErrorWorkflow()', () => { }); }); -describe('add()', () => { - describe('in single-main scenario', () => { - test('should add webhooks, triggers and pollers', async () => { - const mode = chooseRandomly(NON_LEADERSHIP_CHANGE_MODES); - - const workflow = await createWorkflow({ active: true }, owner); - - const addWebhooksSpy = jest.spyOn(activeWorkflowRunner, 'addWebhooks'); - const addTriggersAndPollersSpy = jest.spyOn(activeWorkflowRunner, 'addTriggersAndPollers'); - - await activeWorkflowRunner.init(); - - addWebhooksSpy.mockReset(); - addTriggersAndPollersSpy.mockReset(); - - await activeWorkflowRunner.add(workflow.id, mode); - - expect(addWebhooksSpy).toHaveBeenCalledTimes(1); - expect(addTriggersAndPollersSpy).toHaveBeenCalledTimes(1); - }); - }); - - describe('in multi-main scenario', () => { - describe('leader', () => { - describe('on non-leadership-change activation mode', () => { - test('should add webhooks only', async () => { - const mode = chooseRandomly(NON_LEADERSHIP_CHANGE_MODES); - - const workflow = await createWorkflow({ active: true }, owner); - - jest.replaceProperty(orchestrationService, 'isMultiMainSetupEnabled', true); - jest.replaceProperty(orchestrationService, 'isLeader', true); - - const addWebhooksSpy = jest.spyOn(activeWorkflowRunner, 'addWebhooks'); - const addTriggersAndPollersSpy = jest.spyOn( - activeWorkflowRunner, - 'addTriggersAndPollers', - ); - - await activeWorkflowRunner.init(); - addWebhooksSpy.mockReset(); - addTriggersAndPollersSpy.mockReset(); - - await activeWorkflowRunner.add(workflow.id, mode); - - expect(addWebhooksSpy).toHaveBeenCalledTimes(1); - expect(addTriggersAndPollersSpy).toHaveBeenCalledTimes(1); - }); - }); - - describe('on leadership change activation mode', () => { - test('should add triggers and pollers only', async () => { - const mode = 'leadershipChange'; - - jest.replaceProperty(orchestrationService, 'isMultiMainSetupEnabled', true); - jest.replaceProperty(orchestrationService, 'isLeader', true); - - const workflow = await createWorkflow({ active: true }, owner); - - const addWebhooksSpy = 
jest.spyOn(activeWorkflowRunner, 'addWebhooks'); - const addTriggersAndPollersSpy = jest.spyOn( - activeWorkflowRunner, - 'addTriggersAndPollers', - ); - - await activeWorkflowRunner.init(); - addWebhooksSpy.mockReset(); - addTriggersAndPollersSpy.mockReset(); - - await activeWorkflowRunner.add(workflow.id, mode); - - expect(addWebhooksSpy).not.toHaveBeenCalled(); - expect(addTriggersAndPollersSpy).toHaveBeenCalledTimes(1); - }); - }); - }); - - describe('follower', () => { - describe('on any activation mode', () => { - test('should not add webhooks, triggers or pollers', async () => { - const mode = chooseRandomly(NON_LEADERSHIP_CHANGE_MODES); - - jest.replaceProperty(orchestrationService, 'isMultiMainSetupEnabled', true); - jest.replaceProperty(orchestrationService, 'isLeader', false); - - const workflow = await createWorkflow({ active: true }, owner); - - const addWebhooksSpy = jest.spyOn(activeWorkflowRunner, 'addWebhooks'); - const addTriggersAndPollersSpy = jest.spyOn( - activeWorkflowRunner, - 'addTriggersAndPollers', - ); - - await activeWorkflowRunner.init(); - addWebhooksSpy.mockReset(); - addTriggersAndPollersSpy.mockReset(); - - await activeWorkflowRunner.add(workflow.id, mode); - - expect(addWebhooksSpy).not.toHaveBeenCalled(); - expect(addTriggersAndPollersSpy).not.toHaveBeenCalled(); - }); - }); - }); - }); -}); - describe('addWebhooks()', () => { test('should call `WebhookService.storeWebhook()`', async () => { const mockWebhook = { path: 'fake-path' } as unknown as IWebhookData; diff --git a/packages/cli/test/integration/PermissionChecker.test.ts b/packages/cli/test/integration/PermissionChecker.test.ts new file mode 100644 index 0000000000000..b8035926e8df7 --- /dev/null +++ b/packages/cli/test/integration/PermissionChecker.test.ts @@ -0,0 +1,385 @@ +import { v4 as uuid } from 'uuid'; +import { Container } from 'typedi'; +import type { WorkflowSettings } from 'n8n-workflow'; +import { SubworkflowOperationError, Workflow } from 'n8n-workflow'; + +import config from '@/config'; +import { User } from '@db/entities/User'; +import { WorkflowRepository } from '@db/repositories/workflow.repository'; +import { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.repository'; +import { UserRepository } from '@/databases/repositories/user.repository'; +import { generateNanoId } from '@/databases/utils/generators'; +import { License } from '@/License'; +import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials'; +import { NodeTypes } from '@/NodeTypes'; +import { OwnershipService } from '@/services/ownership.service'; +import { PermissionChecker } from '@/UserManagement/PermissionChecker'; + +import { mockInstance } from '../shared/mocking'; +import { + randomCredentialPayload as randomCred, + randomName, + randomPositiveDigit, +} from '../integration/shared/random'; +import { LicenseMocker } from '../integration/shared/license'; +import * as testDb from '../integration/shared/testDb'; +import type { SaveCredentialFunction } from '../integration/shared/types'; +import { mockNodeTypesData } from '../unit/Helpers'; +import { affixRoleToSaveCredential } from '../integration/shared/db/credentials'; +import { createOwner, createUser } from '../integration/shared/db/users'; + +export const toTargetCallErrorMsg = (subworkflowId: string) => + `Target workflow ID ${subworkflowId} may not be called`; + +export function createParentWorkflow() { + return Container.get(WorkflowRepository).create({ + id: generateNanoId(), + name: randomName(), + active: false, + connections: 
{}, + nodes: [ + { + name: '', + typeVersion: 1, + type: 'n8n-nodes-base.executeWorkflow', + position: [0, 0], + parameters: {}, + }, + ], + }); +} + +export function createSubworkflow({ + policy, + callerIds, +}: { + policy?: WorkflowSettings.CallerPolicy; + callerIds?: string; +} = {}) { + return new Workflow({ + id: uuid(), + nodes: [], + connections: {}, + active: false, + nodeTypes: mockNodeTypes, + settings: { + ...(policy ? { callerPolicy: policy } : {}), + ...(callerIds ? { callerIds } : {}), + }, + }); +} + +let saveCredential: SaveCredentialFunction; + +const mockNodeTypes = mockInstance(NodeTypes); +mockInstance(LoadNodesAndCredentials, { + loadedNodes: mockNodeTypesData(['start', 'actionNetwork']), +}); + +let permissionChecker: PermissionChecker; + +beforeAll(async () => { + await testDb.init(); + + saveCredential = affixRoleToSaveCredential('credential:owner'); + + permissionChecker = Container.get(PermissionChecker); +}); + +describe('check()', () => { + beforeEach(async () => { + await testDb.truncate(['Workflow', 'Credentials']); + }); + + afterAll(async () => { + await testDb.terminate(); + }); + + test('should allow if workflow has no creds', async () => { + const userId = uuid(); + + const workflow = new Workflow({ + id: randomPositiveDigit().toString(), + name: 'test', + active: false, + connections: {}, + nodeTypes: mockNodeTypes, + nodes: [ + { + id: uuid(), + name: 'Start', + type: 'n8n-nodes-base.start', + typeVersion: 1, + parameters: {}, + position: [0, 0], + }, + ], + }); + + expect(async () => await permissionChecker.check(workflow, userId)).not.toThrow(); + }); + + test('should allow if requesting user is instance owner', async () => { + const owner = await createOwner(); + + const workflow = new Workflow({ + id: randomPositiveDigit().toString(), + name: 'test', + active: false, + connections: {}, + nodeTypes: mockNodeTypes, + nodes: [ + { + id: uuid(), + name: 'Action Network', + type: 'n8n-nodes-base.actionNetwork', + parameters: {}, + typeVersion: 1, + position: [0, 0], + credentials: { + actionNetworkApi: { + id: randomPositiveDigit().toString(), + name: 'Action Network Account', + }, + }, + }, + ], + }); + + expect(async () => await permissionChecker.check(workflow, owner.id)).not.toThrow(); + }); + + test('should allow if workflow creds are valid subset', async () => { + const [owner, member] = await Promise.all([createOwner(), createUser()]); + + const ownerCred = await saveCredential(randomCred(), { user: owner }); + const memberCred = await saveCredential(randomCred(), { user: member }); + + const workflow = new Workflow({ + id: randomPositiveDigit().toString(), + name: 'test', + active: false, + connections: {}, + nodeTypes: mockNodeTypes, + nodes: [ + { + id: uuid(), + name: 'Action Network', + type: 'n8n-nodes-base.actionNetwork', + parameters: {}, + typeVersion: 1, + position: [0, 0], + credentials: { + actionNetworkApi: { + id: ownerCred.id, + name: ownerCred.name, + }, + }, + }, + { + id: uuid(), + name: 'Action Network 2', + type: 'n8n-nodes-base.actionNetwork', + parameters: {}, + typeVersion: 1, + position: [0, 0], + credentials: { + actionNetworkApi: { + id: memberCred.id, + name: memberCred.name, + }, + }, + }, + ], + }); + + expect(async () => await permissionChecker.check(workflow, owner.id)).not.toThrow(); + }); + + test('should deny if workflow creds are not valid subset', async () => { + const member = await createUser(); + + const memberCred = await saveCredential(randomCred(), { user: member }); + + const workflowDetails = { + id: 
randomPositiveDigit().toString(), + name: 'test', + active: false, + connections: {}, + nodeTypes: mockNodeTypes, + nodes: [ + { + id: uuid(), + name: 'Action Network', + type: 'n8n-nodes-base.actionNetwork', + parameters: {}, + typeVersion: 1, + position: [0, 0] as [number, number], + credentials: { + actionNetworkApi: { + id: memberCred.id, + name: memberCred.name, + }, + }, + }, + { + id: uuid(), + name: 'Action Network 2', + type: 'n8n-nodes-base.actionNetwork', + parameters: {}, + typeVersion: 1, + position: [0, 0] as [number, number], + credentials: { + actionNetworkApi: { + id: 'non-existing-credential-id', + name: 'Non-existing credential name', + }, + }, + }, + ], + }; + + const workflowEntity = await Container.get(WorkflowRepository).save(workflowDetails); + + await Container.get(SharedWorkflowRepository).save({ + workflow: workflowEntity, + user: member, + role: 'workflow:owner', + }); + + const workflow = new Workflow(workflowDetails); + + await expect(permissionChecker.check(workflow, member.id)).rejects.toThrow(); + }); +}); + +describe('checkSubworkflowExecutePolicy()', () => { + const ownershipService = mockInstance(OwnershipService); + + let license: LicenseMocker; + + beforeAll(() => { + license = new LicenseMocker(); + license.mock(Container.get(License)); + license.enable('feat:sharing'); + }); + + describe('no caller policy', () => { + test('should fall back to N8N_WORKFLOW_CALLER_POLICY_DEFAULT_OPTION', async () => { + config.set('workflows.callerPolicyDefaultOption', 'none'); + + const parentWorkflow = createParentWorkflow(); + const subworkflow = createSubworkflow(); // no caller policy + + ownershipService.getWorkflowOwnerCached.mockResolvedValue(new User()); + + const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, parentWorkflow.id); + + await expect(check).rejects.toThrow(toTargetCallErrorMsg(subworkflow.id)); + + config.load(config.default); + }); + }); + + describe('overridden caller policy', () => { + test('if no sharing, should override policy to workflows-from-same-owner', async () => { + license.disable('feat:sharing'); + + const parentWorkflow = createParentWorkflow(); + const subworkflow = createSubworkflow({ policy: 'any' }); // should be overridden + + const firstUser = Container.get(UserRepository).create({ id: uuid() }); + const secondUser = Container.get(UserRepository).create({ id: uuid() }); + + ownershipService.getWorkflowOwnerCached.mockResolvedValueOnce(firstUser); // parent workflow + ownershipService.getWorkflowOwnerCached.mockResolvedValueOnce(secondUser); // subworkflow + + const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, parentWorkflow.id); + + await expect(check).rejects.toThrow(toTargetCallErrorMsg(subworkflow.id)); + + try { + await permissionChecker.checkSubworkflowExecutePolicy(subworkflow, uuid()); + } catch (error) { + if (error instanceof SubworkflowOperationError) { + expect(error.description).toBe( + `${firstUser.firstName} (${firstUser.email}) can make this change. 
You may need to tell them the ID of this workflow, which is ${subworkflow.id}`, + ); + } + } + + license.enable('feat:sharing'); + }); + }); + + describe('workflows-from-list caller policy', () => { + test('should allow if caller list contains parent workflow ID', async () => { + const parentWorkflow = createParentWorkflow(); + + const subworkflow = createSubworkflow({ + policy: 'workflowsFromAList', + callerIds: `123,456,bcdef, ${parentWorkflow.id}`, + }); + + const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, parentWorkflow.id); + + await expect(check).resolves.not.toThrow(); + }); + + test('should deny if caller list does not contain parent workflow ID', async () => { + const parentWorkflow = createParentWorkflow(); + + const subworkflow = createSubworkflow({ + policy: 'workflowsFromAList', + callerIds: 'xyz', + }); + + const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, parentWorkflow.id); + + await expect(check).rejects.toThrow(); + }); + }); + + describe('any caller policy', () => { + test('should not throw', async () => { + const parentWorkflow = createParentWorkflow(); + const subworkflow = createSubworkflow({ policy: 'any' }); + ownershipService.getWorkflowOwnerCached.mockResolvedValue(new User()); + + const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, parentWorkflow.id); + + await expect(check).resolves.not.toThrow(); + }); + }); + + describe('workflows-from-same-owner caller policy', () => { + test('should deny if the two workflows are owned by different users', async () => { + const parentWorkflowOwner = Container.get(UserRepository).create({ id: uuid() }); + const subworkflowOwner = Container.get(UserRepository).create({ id: uuid() }); + + ownershipService.getWorkflowOwnerCached.mockResolvedValueOnce(parentWorkflowOwner); // parent workflow + ownershipService.getWorkflowOwnerCached.mockResolvedValueOnce(subworkflowOwner); // subworkflow + + const subworkflow = createSubworkflow({ policy: 'workflowsFromSameOwner' }); + + const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, uuid()); + + await expect(check).rejects.toThrow(toTargetCallErrorMsg(subworkflow.id)); + }); + + test('should allow if both workflows are owned by the same user', async () => { + const parentWorkflow = createParentWorkflow(); + + const bothWorkflowsOwner = Container.get(UserRepository).create({ id: uuid() }); + + ownershipService.getWorkflowOwnerCached.mockResolvedValueOnce(bothWorkflowsOwner); // parent workflow + ownershipService.getWorkflowOwnerCached.mockResolvedValueOnce(bothWorkflowsOwner); // subworkflow + + const subworkflow = createSubworkflow({ policy: 'workflowsFromSameOwner' }); + + const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, parentWorkflow.id); + + await expect(check).resolves.not.toThrow(); + }); + }); +}); diff --git a/packages/cli/test/integration/controllers/invitation/invitation.controller.integration.test.ts b/packages/cli/test/integration/controllers/invitation/invitation.controller.integration.test.ts index 971360bc05438..b6371fdf9e361 100644 --- a/packages/cli/test/integration/controllers/invitation/invitation.controller.integration.test.ts +++ b/packages/cli/test/integration/controllers/invitation/invitation.controller.integration.test.ts @@ -1,6 +1,6 @@ import { mocked } from 'jest-mock'; import Container from 'typedi'; -import { Not } from 'typeorm'; +import { Not } from '@n8n/typeorm'; import { InternalHooks } from '@/InternalHooks'; import { ExternalHooks } 
from '@/ExternalHooks'; diff --git a/packages/cli/test/integration/credentials.ee.test.ts b/packages/cli/test/integration/credentials.ee.test.ts index be9fc74be4e9c..ab09f4c14349e 100644 --- a/packages/cli/test/integration/credentials.ee.test.ts +++ b/packages/cli/test/integration/credentials.ee.test.ts @@ -1,12 +1,11 @@ import { Container } from 'typedi'; import type { SuperAgentTest } from 'supertest'; -import { In } from 'typeorm'; +import { In } from '@n8n/typeorm'; import type { IUser } from 'n8n-workflow'; import type { ListQuery } from '@/requests'; import type { User } from '@db/entities/User'; import { SharedCredentialsRepository } from '@db/repositories/sharedCredentials.repository'; -import { License } from '@/License'; import { randomCredentialPayload } from './shared/random'; import * as testDb from './shared/testDb'; @@ -19,8 +18,10 @@ import { UserManagementMailer } from '@/UserManagement/email'; import { mockInstance } from '../shared/mocking'; import config from '@/config'; -const sharingSpy = jest.spyOn(License.prototype, 'isSharingEnabled').mockReturnValue(true); -const testServer = utils.setupTestServer({ endpointGroups: ['credentials'] }); +const testServer = utils.setupTestServer({ + endpointGroups: ['credentials'], + enabledFeatures: ['feat:sharing'], +}); let owner: User; let member: User; @@ -49,38 +50,6 @@ afterEach(() => { jest.clearAllMocks(); }); -// ---------------------------------------- -// dynamic router switching -// ---------------------------------------- -describe('router should switch based on flag', () => { - let savedCredentialId: string; - - beforeEach(async () => { - const savedCredential = await saveCredential(randomCredentialPayload(), { user: owner }); - savedCredentialId = savedCredential.id; - }); - - test('when sharing is disabled', async () => { - sharingSpy.mockReturnValueOnce(false); - - await authOwnerAgent - .put(`/credentials/${savedCredentialId}/share`) - .send({ shareWithIds: [member.id] }) - .expect(404); - - await authOwnerAgent.get(`/credentials/${savedCredentialId}`).send().expect(200); - }); - - test('when sharing is enabled', async () => { - await authOwnerAgent - .put(`/credentials/${savedCredentialId}/share`) - .send({ shareWithIds: [member.id] }) - .expect(200); - - await authOwnerAgent.get(`/credentials/${savedCredentialId}`).send().expect(200); - }); -}); - // ---------------------------------------- // GET /credentials - fetch all credentials // ---------------------------------------- @@ -521,6 +490,7 @@ describe('PUT /credentials/:id/share', () => { responses.forEach((response) => expect(response.statusCode).toBe(400)); expect(mailer.notifyCredentialsShared).toHaveBeenCalledTimes(0); }); + test('should unshare the credential', async () => { const savedCredential = await saveCredential(randomCredentialPayload(), { user: owner }); diff --git a/packages/cli/test/integration/eventbus.ee.test.ts b/packages/cli/test/integration/eventbus.ee.test.ts index dbded5128994a..f486f39abe12b 100644 --- a/packages/cli/test/integration/eventbus.ee.test.ts +++ b/packages/cli/test/integration/eventbus.ee.test.ts @@ -16,7 +16,7 @@ import { } from 'n8n-workflow'; import type { User } from '@db/entities/User'; -import { MessageEventBus } from '@/eventbus'; +import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; import { EventMessageGeneric } from '@/eventbus/EventMessageClasses/EventMessageGeneric'; import type { MessageEventBusDestinationSyslog } from 
'@/eventbus/MessageEventBusDestination/MessageEventBusDestinationSyslog.ee'; import type { MessageEventBusDestinationWebhook } from '@/eventbus/MessageEventBusDestination/MessageEventBusDestinationWebhook.ee'; diff --git a/packages/cli/test/integration/eventbus.test.ts b/packages/cli/test/integration/eventbus.test.ts index 3a441c7885d9c..9e3e27dacb916 100644 --- a/packages/cli/test/integration/eventbus.test.ts +++ b/packages/cli/test/integration/eventbus.test.ts @@ -1,7 +1,7 @@ import type { SuperAgentTest } from 'supertest'; import type { User } from '@db/entities/User'; -import { MessageEventBus } from '@/eventbus'; +import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; import { ExecutionDataRecoveryService } from '@/eventbus/executionDataRecovery.service'; import * as utils from './shared/utils/'; diff --git a/packages/cli/test/integration/ldap/ldap.api.test.ts b/packages/cli/test/integration/ldap/ldap.api.test.ts index 5969fa4b7f327..f3a923d4bd85e 100644 --- a/packages/cli/test/integration/ldap/ldap.api.test.ts +++ b/packages/cli/test/integration/ldap/ldap.api.test.ts @@ -1,7 +1,7 @@ import Container from 'typedi'; import type { SuperAgentTest } from 'supertest'; import type { Entry as LdapUser } from 'ldapts'; -import { Not } from 'typeorm'; +import { Not } from '@n8n/typeorm'; import { jsonParse } from 'n8n-workflow'; import { Cipher } from 'n8n-core'; @@ -47,7 +47,7 @@ const testServer = utils.setupTestServer({ }); beforeAll(async () => { - owner = await createUser({ role: 'global:owner', password: 'password' }); + owner = await createUser({ role: 'global:owner' }); authOwnerAgent = testServer.authAgentFor(owner); defaultLdapConfig.bindingAdminPassword = Container.get(Cipher).encrypt( diff --git a/packages/cli/test/integration/me.api.test.ts b/packages/cli/test/integration/me.api.test.ts index 61dde8c92f97d..08261b7b08297 100644 --- a/packages/cli/test/integration/me.api.test.ts +++ b/packages/cli/test/integration/me.api.test.ts @@ -1,5 +1,5 @@ import type { SuperAgentTest } from 'supertest'; -import { IsNull } from 'typeorm'; +import { IsNull } from '@n8n/typeorm'; import validator from 'validator'; import type { User } from '@db/entities/User'; import { SUCCESS_RESPONSE_BODY } from './shared/constants'; diff --git a/packages/cli/test/integration/shared/db/users.ts b/packages/cli/test/integration/shared/db/users.ts index 27defb218467d..2ee01524bfecd 100644 --- a/packages/cli/test/integration/shared/db/users.ts +++ b/packages/cli/test/integration/shared/db/users.ts @@ -9,6 +9,9 @@ import { MfaService } from '@/Mfa/mfa.service'; import { randomApiKey, randomEmail, randomName, randomValidPassword } from '../random'; +// pre-computed bcrypt hash for the string 'password', using `await hash('password', 10)` +const passwordHash = '$2a$10$njedH7S6V5898mj6p0Jr..IGY9Ms.qNwR7RbSzzX9yubJocKfvGGK'; + /** * Store a user in the DB, defaulting to a `member`. */ @@ -16,7 +19,7 @@ export async function createUser(attributes: Partial<User> = {}): Promise<User> { const { email, password, firstName, lastName, role, ...rest } = attributes; const user = Container.get(UserRepository).create({ email: email ?? randomEmail(), - password: await hash(password ?? randomValidPassword(), 10), + password: password ? await hash(password, 1) : passwordHash, firstName: firstName ?? randomName(), lastName: lastName ?? randomName(), role: role ??
'global:member', @@ -101,7 +104,7 @@ export async function createManyUsers( [...Array(amount)].map(async () => Container.get(UserRepository).create({ email: email ?? randomEmail(), - password: await hash(password ?? randomValidPassword(), 10), + password: password ? await hash(password, 1) : passwordHash, firstName: firstName ?? randomName(), lastName: lastName ?? randomName(), role: role ?? 'global:member', diff --git a/packages/cli/test/integration/shared/db/workflows.ts b/packages/cli/test/integration/shared/db/workflows.ts index 5603db7ab9625..f0758088f1667 100644 --- a/packages/cli/test/integration/shared/db/workflows.ts +++ b/packages/cli/test/integration/shared/db/workflows.ts @@ -1,5 +1,5 @@ import Container from 'typedi'; -import type { DeepPartial } from 'typeorm'; +import type { DeepPartial } from '@n8n/typeorm'; import { v4 as uuid } from 'uuid'; import type { User } from '@db/entities/User'; diff --git a/packages/cli/test/integration/shared/testDb.ts b/packages/cli/test/integration/shared/testDb.ts index 1ed59388951a0..4158daccfe5a2 100644 --- a/packages/cli/test/integration/shared/testDb.ts +++ b/packages/cli/test/integration/shared/testDb.ts @@ -1,5 +1,5 @@ -import type { DataSourceOptions as ConnectionOptions, Repository } from 'typeorm'; -import { DataSource as Connection } from 'typeorm'; +import type { DataSourceOptions as ConnectionOptions, Repository } from '@n8n/typeorm'; +import { DataSource as Connection } from '@n8n/typeorm'; import { Container } from 'typedi'; import type { Class } from 'n8n-core'; diff --git a/packages/cli/test/integration/shared/utils/index.ts b/packages/cli/test/integration/shared/utils/index.ts index 584f6e0b2f739..e2c2225b8b76b 100644 --- a/packages/cli/test/integration/shared/utils/index.ts +++ b/packages/cli/test/integration/shared/utils/index.ts @@ -31,9 +31,12 @@ export { setupTestServer } from './testServer'; * Initialize node types. 
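// A minimal aside on the pre-computed bcrypt hash added in shared/db/users.ts above,
// assuming the same bcryptjs `hash`/`compare` helpers those test utilities already import.
// Bcrypt embeds the salt and cost factor (`$2a$10$...`) in the hash string itself, so one
// pre-computed constant can be verified at runtime, while re-hashing 'password' would
// generate a fresh salt and therefore a different string on every call:
import { compare, hash } from 'bcryptjs';

const precomputed = '$2a$10$njedH7S6V5898mj6p0Jr..IGY9Ms.qNwR7RbSzzX9yubJocKfvGGK';

async function demoPrecomputedHash() {
	console.log(await compare('password', precomputed)); // true — the salt is read back out of the hash
	console.log((await hash('password', 10)) === precomputed); // false — a new salt yields a new hash
}

void demoPrecomputedHash();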
*/ export async function initActiveWorkflowRunner() { - mockInstance(Push); - mockInstance(OrchestrationService); + mockInstance(OrchestrationService, { + isMultiMainSetupEnabled: false, + shouldAddWebhooks: jest.fn().mockReturnValue(true), + }); + mockInstance(Push); mockInstance(ExecutionService); const { ActiveWorkflowRunner } = await import('@/ActiveWorkflowRunner'); const workflowRunner = Container.get(ActiveWorkflowRunner); diff --git a/packages/cli/test/integration/shared/utils/testServer.ts b/packages/cli/test/integration/shared/utils/testServer.ts index dfa22f173a30e..d791d7dc895e7 100644 --- a/packages/cli/test/integration/shared/utils/testServer.ts +++ b/packages/cli/test/integration/shared/utils/testServer.ts @@ -123,8 +123,8 @@ export const setupTestServer = ({ for (const group of endpointGroups) { switch (group) { case 'credentials': - const { credentialsController } = await import('@/credentials/credentials.controller'); - app.use(`/${REST_PATH_SEGMENT}/credentials`, credentialsController); + const { CredentialsController } = await import('@/credentials/credentials.controller'); + registerController(app, CredentialsController); break; case 'workflows': diff --git a/packages/cli/test/integration/workflow-tag-mapping.repository.integration.test.ts b/packages/cli/test/integration/workflow-tag-mapping.repository.integration.test.ts new file mode 100644 index 0000000000000..7cf1dcdaedbc2 --- /dev/null +++ b/packages/cli/test/integration/workflow-tag-mapping.repository.integration.test.ts @@ -0,0 +1,90 @@ +import Container from 'typedi'; + +import * as testDb from './shared/testDb'; +import { WorkflowTagMappingRepository } from '@/databases/repositories/workflowTagMapping.repository'; +import { createWorkflow } from './shared/db/workflows'; +import { TagRepository } from '@/databases/repositories/tag.repository'; + +describe('WorkflowTagMappingRepository', () => { + let taggingRepository: WorkflowTagMappingRepository; + let tagRepository: TagRepository; + + beforeAll(async () => { + await testDb.init(); + + taggingRepository = Container.get(WorkflowTagMappingRepository); + tagRepository = Container.get(TagRepository); + }); + + afterEach(async () => { + await testDb.truncate(['WorkflowTagMapping', 'Workflow', 'Tag']); + }); + + afterAll(async () => { + await testDb.terminate(); + }); + + describe('overwriteTaggings', () => { + test('should overwrite taggings in a workflow', async () => { + const workflow = await createWorkflow(); + + const oldTags = await tagRepository.save( + ['tag1', 'tag2'].map((name) => tagRepository.create({ name })), + ); + + const oldTaggings = oldTags.map((tag) => + taggingRepository.create({ + tagId: tag.id, + workflowId: workflow.id, + }), + ); + + await taggingRepository.save(oldTaggings); + + const newTags = await tagRepository.save( + ['tag3', 'tag4'].map((name) => tagRepository.create({ name })), + ); + + await taggingRepository.overwriteTaggings( + workflow.id, + newTags.map((t) => t.id), + ); + + const taggings = await taggingRepository.findBy({ workflowId: workflow.id }); + + expect(taggings).toHaveLength(2); + + const [firstNewTag, secondNewTag] = newTags; + + expect(taggings).toEqual( + expect.arrayContaining([ + expect.objectContaining({ tagId: firstNewTag.id, workflowId: workflow.id }), + expect.objectContaining({ tagId: secondNewTag.id, workflowId: workflow.id }), + ]), + ); + }); + + test('should delete taggings if no tags are provided', async () => { + const workflow = await createWorkflow(); + + const oldTags = await tagRepository.save( + 
['tag1', 'tag2'].map((name) => tagRepository.create({ name })), + ); + + const oldTaggings = oldTags.map((tag) => + taggingRepository.create({ + tagId: tag.id, + workflowId: workflow.id, + }), + ); + + await taggingRepository.save(oldTaggings); + + await taggingRepository.overwriteTaggings(workflow.id, []); + + const taggings = await taggingRepository.findBy({ workflowId: workflow.id }); + + expect(taggings).toHaveLength(0); + }); + }); +}); diff --git a/packages/cli/test/integration/workflowHistoryManager.test.ts b/packages/cli/test/integration/workflowHistoryManager.test.ts index 181c7c926623c..0b20f77c4b2d0 100644 --- a/packages/cli/test/integration/workflowHistoryManager.test.ts +++ b/packages/cli/test/integration/workflowHistoryManager.test.ts @@ -1,5 +1,5 @@ import Container from 'typedi'; -import { In } from 'typeorm'; +import { In } from '@n8n/typeorm'; import { DateTime } from 'luxon'; import config from '@/config'; diff --git a/packages/cli/test/integration/workflows/workflow.service.test.ts b/packages/cli/test/integration/workflows/workflow.service.test.ts index 996b3d0d86d08..9fa11b86e108e 100644 --- a/packages/cli/test/integration/workflows/workflow.service.test.ts +++ b/packages/cli/test/integration/workflows/workflow.service.test.ts @@ -3,7 +3,7 @@ import { mock } from 'jest-mock-extended'; import { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner'; import { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.repository'; import { WorkflowRepository } from '@db/repositories/workflow.repository'; -import { MessageEventBus } from '@/eventbus'; +import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; import { Telemetry } from '@/telemetry'; import { OrchestrationService } from '@/services/orchestration.service'; import { WorkflowService } from '@/workflows/workflow.service'; @@ -83,35 +83,4 @@ describe('update()', () => { expect(addSpy).not.toHaveBeenCalled(); }); - - test('should broadcast active workflow state change if state changed', async () => { - const owner = await createOwner(); - const workflow = await createWorkflow({ active: true }, owner); - - const publishSpy = jest.spyOn(orchestrationService, 'publish'); - - workflow.active = false; - await workflowService.update(owner, workflow, workflow.id); - - expect(publishSpy).toHaveBeenCalledTimes(1); - expect(publishSpy).toHaveBeenCalledWith( - 'workflowActiveStateChanged', - expect.objectContaining({ - newState: false, - oldState: true, - workflowId: workflow.id, - }), - ); - }); - - test('should not broadcast active workflow state change if state did not change', async () => { - const owner = await createOwner(); - const workflow = await createWorkflow({ active: true }, owner); - - const publishSpy = jest.spyOn(orchestrationService, 'publish'); - - await workflowService.update(owner, workflow, workflow.id); - - expect(publishSpy).not.toHaveBeenCalled(); - }); }); diff --git a/packages/cli/test/teardown.ts b/packages/cli/test/teardown.ts index 57ab9334192a5..25fd4e14fe63e 100644 --- a/packages/cli/test/teardown.ts +++ b/packages/cli/test/teardown.ts @@ -1,5 +1,5 @@ import 'tsconfig-paths/register'; -import { DataSource as Connection } from 'typeorm'; +import { DataSource as Connection } from '@n8n/typeorm'; import config from '@/config'; import { getBootstrapDBOptions, testDbPrefix } from './integration/shared/testDb'; diff --git a/packages/cli/test/unit/ActiveExecutions.test.ts b/packages/cli/test/unit/ActiveExecutions.test.ts index f30fd3778ab6d..892d976b4bd3c 100644 --- 
a/packages/cli/test/unit/ActiveExecutions.test.ts +++ b/packages/cli/test/unit/ActiveExecutions.test.ts @@ -45,11 +45,7 @@ describe('ActiveExecutions', () => { test('Should update execution if add is called with execution ID', async () => { const newExecution = mockExecutionData(); - const executionId = await activeExecutions.add( - newExecution, - undefined, - FAKE_SECOND_EXECUTION_ID, - ); + const executionId = await activeExecutions.add(newExecution, FAKE_SECOND_EXECUTION_ID); expect(executionId).toBe(FAKE_SECOND_EXECUTION_ID); expect(activeExecutions.getActiveExecutions().length).toBe(1); @@ -67,7 +63,7 @@ describe('ActiveExecutions', () => { test('Should successfully attach execution to valid executionId', async () => { const newExecution = mockExecutionData(); - await activeExecutions.add(newExecution, undefined, FAKE_EXECUTION_ID); + await activeExecutions.add(newExecution, FAKE_EXECUTION_ID); const deferredPromise = mockCancelablePromise(); expect(() => @@ -77,7 +73,7 @@ describe('ActiveExecutions', () => { test('Should attach and resolve response promise to existing execution', async () => { const newExecution = mockExecutionData(); - await activeExecutions.add(newExecution, undefined, FAKE_EXECUTION_ID); + await activeExecutions.add(newExecution, FAKE_EXECUTION_ID); const deferredPromise = await mockDeferredPromise(); activeExecutions.attachResponsePromise(FAKE_EXECUTION_ID, deferredPromise); const fakeResponse = { data: { resultData: { runData: {} } } }; @@ -129,6 +125,7 @@ function mockExecutionData(): IWorkflowExecutionDataProcess { return { executionMode: 'manual', workflowData: { + id: '123', name: 'Test workflow 1', active: false, createdAt: new Date(), diff --git a/packages/cli/test/unit/InternalHooks.test.ts b/packages/cli/test/unit/InternalHooks.test.ts index 4bd10033ac6ce..46ea31623914a 100644 --- a/packages/cli/test/unit/InternalHooks.test.ts +++ b/packages/cli/test/unit/InternalHooks.test.ts @@ -1,67 +1,80 @@ -import { Telemetry } from '@/telemetry'; -import { InternalHooks } from '@/InternalHooks'; -import { mockInstance } from '../shared/mocking'; -import type { IDiagnosticInfo } from '@/Interfaces'; import { mock } from 'jest-mock-extended'; +import config from '@/config'; +import { N8N_VERSION } from '@/constants'; +import { InternalHooks } from '@/InternalHooks'; +import type { License } from '@/License'; +import type { Telemetry } from '@/telemetry'; -jest.mock('@/telemetry'); - -let internalHooks: InternalHooks; -let telemetry: Telemetry; +jest.mock('node:os', () => ({ + tmpdir: () => '', + cpus: () => [{ model: 'MIPS R3000', speed: 40_000_000 }], + type: () => 'TempleOS', + version: () => '5.03', + totalmem: () => 1024 * 1024, +})); describe('InternalHooks', () => { - beforeAll(() => { - telemetry = mockInstance(Telemetry); - internalHooks = new InternalHooks(telemetry, mock(), mock(), mock(), mock(), mock()); - }); + const telemetry = mock(); + const license = mock(); + const internalHooks = new InternalHooks( + telemetry, + mock(), + mock(), + mock(), + mock(), + mock(), + mock(), + license, + ); + + beforeEach(() => jest.clearAllMocks()); it('Should be defined', () => { expect(internalHooks).toBeDefined(); }); it('Should forward license plan name and tenant id to identify when provided', async () => { - const licensePlanName = 'license-plan-name'; - const licenseTenantId = 1001; + license.getPlanName.mockReturnValue('Best Plan'); - const diagnosticInfo: IDiagnosticInfo = { - versionCli: '1.2.3', - databaseType: 'sqlite', - notificationsEnabled: true, - 
disableProductionWebhooksOnMainProcess: false, - systemInfo: { - os: {}, - cpus: {}, + await internalHooks.onServerStarted(); + + expect(telemetry.identify).toHaveBeenCalledWith({ + version_cli: N8N_VERSION, + db_type: config.get('database.type'), + n8n_version_notifications_enabled: true, + n8n_disable_production_main_process: false, + system_info: { + memory: 1024, + os: { + type: 'TempleOS', + version: '5.03', + }, + cpus: { + count: 1, + model: 'MIPS R3000', + speed: 40000000, + }, + }, + execution_variables: { + executions_data_max_age: 336, + executions_data_prune: true, + executions_data_save_manual_executions: true, + executions_data_save_on_error: 'all', + executions_data_save_on_progress: false, + executions_data_save_on_success: 'all', + executions_mode: 'regular', + executions_timeout: -1, + executions_timeout_max: 3600, }, - executionVariables: {}, - deploymentType: 'testing', - binaryDataMode: 'default', - smtp_set_up: false, - ldap_allowed: true, - saml_enabled: true, - licensePlanName, - licenseTenantId, + n8n_deployment_type: 'default', + n8n_binary_data_mode: 'default', + smtp_set_up: true, + ldap_allowed: false, + saml_enabled: false, + license_plan_name: 'Best Plan', + license_tenant_id: 1, binary_data_s3: false, multi_main_setup_enabled: false, - }; - - const parameters = { - version_cli: diagnosticInfo.versionCli, - db_type: diagnosticInfo.databaseType, - n8n_version_notifications_enabled: diagnosticInfo.notificationsEnabled, - n8n_disable_production_main_process: diagnosticInfo.disableProductionWebhooksOnMainProcess, - system_info: diagnosticInfo.systemInfo, - execution_variables: diagnosticInfo.executionVariables, - n8n_deployment_type: diagnosticInfo.deploymentType, - n8n_binary_data_mode: diagnosticInfo.binaryDataMode, - smtp_set_up: diagnosticInfo.smtp_set_up, - ldap_allowed: diagnosticInfo.ldap_allowed, - saml_enabled: diagnosticInfo.saml_enabled, - license_plan_name: diagnosticInfo.licensePlanName, - license_tenant_id: diagnosticInfo.licenseTenantId, - }; - - await internalHooks.onServerStarted(diagnosticInfo); - - expect(telemetry.identify).toHaveBeenCalledWith(parameters); + }); }); }); diff --git a/packages/cli/test/unit/PermissionChecker.test.ts b/packages/cli/test/unit/PermissionChecker.test.ts index 5eb4b6e0ea7cd..7e3336230c2ac 100644 --- a/packages/cli/test/unit/PermissionChecker.test.ts +++ b/packages/cli/test/unit/PermissionChecker.test.ts @@ -1,385 +1,137 @@ -import { v4 as uuid } from 'uuid'; -import { Container } from 'typedi'; -import type { WorkflowSettings } from 'n8n-workflow'; -import { SubworkflowOperationError, Workflow } from 'n8n-workflow'; - -import config from '@/config'; -import { User } from '@db/entities/User'; -import { WorkflowRepository } from '@db/repositories/workflow.repository'; -import { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.repository'; -import { UserRepository } from '@/databases/repositories/user.repository'; -import { generateNanoId } from '@/databases/utils/generators'; -import { License } from '@/License'; -import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials'; -import { NodeTypes } from '@/NodeTypes'; -import { OwnershipService } from '@/services/ownership.service'; +import { type INodeTypes, Workflow } from 'n8n-workflow'; +import { mock } from 'jest-mock-extended'; +import type { User } from '@db/entities/User'; +import type { UserRepository } from '@db/repositories/user.repository'; +import type { SharedCredentialsRepository } from 
'@db/repositories/sharedCredentials.repository'; +import type { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.repository'; +import type { License } from '@/License'; import { PermissionChecker } from '@/UserManagement/PermissionChecker'; -import { mockInstance } from '../shared/mocking'; -import { - randomCredentialPayload as randomCred, - randomName, - randomPositiveDigit, -} from '../integration/shared/random'; -import { LicenseMocker } from '../integration/shared/license'; -import * as testDb from '../integration/shared/testDb'; -import type { SaveCredentialFunction } from '../integration/shared/types'; -import { mockNodeTypesData } from './Helpers'; -import { affixRoleToSaveCredential } from '../integration/shared/db/credentials'; -import { createOwner, createUser } from '../integration/shared/db/users'; - -export const toTargetCallErrorMsg = (subworkflowId: string) => - `Target workflow ID ${subworkflowId} may not be called`; - -export function createParentWorkflow() { - return Container.get(WorkflowRepository).create({ - id: generateNanoId(), - name: randomName(), +describe('PermissionChecker', () => { + const user = mock(); + const userRepo = mock(); + const sharedCredentialsRepo = mock(); + const sharedWorkflowRepo = mock(); + const license = mock(); + const permissionChecker = new PermissionChecker( + userRepo, + sharedCredentialsRepo, + sharedWorkflowRepo, + mock(), + license, + ); + + const workflow = new Workflow({ + id: '1', + name: 'test', active: false, connections: {}, + nodeTypes: mock(), nodes: [ { - name: '', + id: 'node-id', + name: 'HTTP Request', + type: 'n8n-nodes-base.httpRequest', + parameters: {}, typeVersion: 1, - type: 'n8n-nodes-base.executeWorkflow', position: [0, 0], - parameters: {}, - }, - ], - }); -} - -export function createSubworkflow({ - policy, - callerIds, -}: { - policy?: WorkflowSettings.CallerPolicy; - callerIds?: string; -} = {}) { - return new Workflow({ - id: uuid(), - nodes: [], - connections: {}, - active: false, - nodeTypes: mockNodeTypes, - settings: { - ...(policy ? { callerPolicy: policy } : {}), - ...(callerIds ? 
{ callerIds } : {}), - }, - }); -} - -let saveCredential: SaveCredentialFunction; - -const mockNodeTypes = mockInstance(NodeTypes); -mockInstance(LoadNodesAndCredentials, { - loadedNodes: mockNodeTypesData(['start', 'actionNetwork']), -}); - -let permissionChecker: PermissionChecker; - -beforeAll(async () => { - await testDb.init(); - - saveCredential = affixRoleToSaveCredential('credential:owner'); - - permissionChecker = Container.get(PermissionChecker); -}); - -describe('check()', () => { - beforeEach(async () => { - await testDb.truncate(['Workflow', 'Credentials']); - }); - - afterAll(async () => { - await testDb.terminate(); - }); - - test('should allow if workflow has no creds', async () => { - const userId = uuid(); - - const workflow = new Workflow({ - id: randomPositiveDigit().toString(), - name: 'test', - active: false, - connections: {}, - nodeTypes: mockNodeTypes, - nodes: [ - { - id: uuid(), - name: 'Start', - type: 'n8n-nodes-base.start', - typeVersion: 1, - parameters: {}, - position: [0, 0], - }, - ], - }); - - expect(async () => await permissionChecker.check(workflow, userId)).not.toThrow(); - }); - - test('should allow if requesting user is instance owner', async () => { - const owner = await createOwner(); - - const workflow = new Workflow({ - id: randomPositiveDigit().toString(), - name: 'test', - active: false, - connections: {}, - nodeTypes: mockNodeTypes, - nodes: [ - { - id: uuid(), - name: 'Action Network', - type: 'n8n-nodes-base.actionNetwork', - parameters: {}, - typeVersion: 1, - position: [0, 0], - credentials: { - actionNetworkApi: { - id: randomPositiveDigit().toString(), - name: 'Action Network Account', - }, - }, - }, - ], - }); - - expect(async () => await permissionChecker.check(workflow, owner.id)).not.toThrow(); - }); - - test('should allow if workflow creds are valid subset', async () => { - const [owner, member] = await Promise.all([createOwner(), createUser()]); - - const ownerCred = await saveCredential(randomCred(), { user: owner }); - const memberCred = await saveCredential(randomCred(), { user: member }); - - const workflow = new Workflow({ - id: randomPositiveDigit().toString(), - name: 'test', - active: false, - connections: {}, - nodeTypes: mockNodeTypes, - nodes: [ - { - id: uuid(), - name: 'Action Network', - type: 'n8n-nodes-base.actionNetwork', - parameters: {}, - typeVersion: 1, - position: [0, 0], - credentials: { - actionNetworkApi: { - id: ownerCred.id, - name: ownerCred.name, - }, - }, - }, - { - id: uuid(), - name: 'Action Network 2', - type: 'n8n-nodes-base.actionNetwork', - parameters: {}, - typeVersion: 1, - position: [0, 0], - credentials: { - actionNetworkApi: { - id: memberCred.id, - name: memberCred.name, - }, - }, - }, - ], - }); - - expect(async () => await permissionChecker.check(workflow, owner.id)).not.toThrow(); - }); - - test('should deny if workflow creds are not valid subset', async () => { - const member = await createUser(); - - const memberCred = await saveCredential(randomCred(), { user: member }); - - const workflowDetails = { - id: randomPositiveDigit().toString(), - name: 'test', - active: false, - connections: {}, - nodeTypes: mockNodeTypes, - nodes: [ - { - id: uuid(), - name: 'Action Network', - type: 'n8n-nodes-base.actionNetwork', - parameters: {}, - typeVersion: 1, - position: [0, 0] as [number, number], - credentials: { - actionNetworkApi: { - id: memberCred.id, - name: memberCred.name, - }, + credentials: { + oAuth2Api: { + id: 'cred-id', + name: 'Custom oAuth2', }, }, - { - id: uuid(), - name: 
'Action Network 2', - type: 'n8n-nodes-base.actionNetwork', - parameters: {}, - typeVersion: 1, - position: [0, 0] as [number, number], - credentials: { - actionNetworkApi: { - id: 'non-existing-credential-id', - name: 'Non-existing credential name', - }, - }, - }, - ], - }; - - const workflowEntity = await Container.get(WorkflowRepository).save(workflowDetails); - - await Container.get(SharedWorkflowRepository).save({ - workflow: workflowEntity, - user: member, - role: 'workflow:owner', - }); - - const workflow = new Workflow(workflowDetails); - - await expect(permissionChecker.check(workflow, member.id)).rejects.toThrow(); - }); -}); - -describe('checkSubworkflowExecutePolicy()', () => { - const ownershipService = mockInstance(OwnershipService); - - let license: LicenseMocker; - - beforeAll(() => { - license = new LicenseMocker(); - license.mock(Container.get(License)); - license.enable('feat:sharing'); + }, + ], }); - describe('no caller policy', () => { - test('should fall back to N8N_WORKFLOW_CALLER_POLICY_DEFAULT_OPTION', async () => { - config.set('workflows.callerPolicyDefaultOption', 'none'); - - const parentWorkflow = createParentWorkflow(); - const subworkflow = createSubworkflow(); // no caller policy + beforeEach(() => jest.clearAllMocks()); - ownershipService.getWorkflowOwnerCached.mockResolvedValue(new User()); - - const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, parentWorkflow.id); - - await expect(check).rejects.toThrow(toTargetCallErrorMsg(subworkflow.id)); - - config.load(config.default); + describe('check', () => { + it('should throw if no user is found', async () => { + userRepo.findOneOrFail.mockRejectedValue(new Error('Fail')); + await expect(permissionChecker.check(workflow, '123')).rejects.toThrow(); + expect(license.isSharingEnabled).not.toHaveBeenCalled(); + expect(sharedWorkflowRepo.getSharedUserIds).not.toBeCalled(); + expect(sharedCredentialsRepo.getOwnedCredentialIds).not.toHaveBeenCalled(); + expect(sharedCredentialsRepo.getAccessibleCredentialIds).not.toHaveBeenCalled(); }); - }); - - describe('overridden caller policy', () => { - test('if no sharing, should override policy to workflows-from-same-owner', async () => { - license.disable('feat:sharing'); - - const parentWorkflow = createParentWorkflow(); - const subworkflow = createSubworkflow({ policy: 'any' }); // should be overridden - const firstUser = Container.get(UserRepository).create({ id: uuid() }); - const secondUser = Container.get(UserRepository).create({ id: uuid() }); - - ownershipService.getWorkflowOwnerCached.mockResolvedValueOnce(firstUser); // parent workflow - ownershipService.getWorkflowOwnerCached.mockResolvedValueOnce(secondUser); // subworkflow - - const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, parentWorkflow.id); - - await expect(check).rejects.toThrow(toTargetCallErrorMsg(subworkflow.id)); - - try { - await permissionChecker.checkSubworkflowExecutePolicy(subworkflow, uuid()); - } catch (error) { - if (error instanceof SubworkflowOperationError) { - expect(error.description).toBe( - `${firstUser.firstName} (${firstUser.email}) can make this change. 
You may need to tell them the ID of this workflow, which is ${subworkflow.id}`, - ); - } - } - - license.enable('feat:sharing'); + it('should allow a user if they have a global `workflow:execute` scope', async () => { + userRepo.findOneOrFail.mockResolvedValue(user); + user.hasGlobalScope.calledWith('workflow:execute').mockReturnValue(true); + await expect(permissionChecker.check(workflow, user.id)).resolves.not.toThrow(); + expect(license.isSharingEnabled).not.toHaveBeenCalled(); + expect(sharedWorkflowRepo.getSharedUserIds).not.toBeCalled(); + expect(sharedCredentialsRepo.getOwnedCredentialIds).not.toHaveBeenCalled(); + expect(sharedCredentialsRepo.getAccessibleCredentialIds).not.toHaveBeenCalled(); }); - }); - describe('workflows-from-list caller policy', () => { - test('should allow if caller list contains parent workflow ID', async () => { - const parentWorkflow = createParentWorkflow(); - - const subworkflow = createSubworkflow({ - policy: 'workflowsFromAList', - callerIds: `123,456,bcdef, ${parentWorkflow.id}`, + describe('When sharing is disabled', () => { + beforeEach(() => { + userRepo.findOneOrFail.mockResolvedValue(user); + user.hasGlobalScope.calledWith('workflow:execute').mockReturnValue(false); + license.isSharingEnabled.mockReturnValue(false); }); - const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, parentWorkflow.id); - - await expect(check).resolves.not.toThrow(); - }); + it('should validate credential access using only owned credentials', async () => { + sharedCredentialsRepo.getOwnedCredentialIds.mockResolvedValue(['cred-id']); - test('should deny if caller list does not contain parent workflow ID', async () => { - const parentWorkflow = createParentWorkflow(); + await expect(permissionChecker.check(workflow, user.id)).resolves.not.toThrow(); - const subworkflow = createSubworkflow({ - policy: 'workflowsFromAList', - callerIds: 'xyz', + expect(sharedWorkflowRepo.getSharedUserIds).not.toBeCalled(); + expect(sharedCredentialsRepo.getOwnedCredentialIds).toBeCalledWith([user.id]); + expect(sharedCredentialsRepo.getAccessibleCredentialIds).not.toHaveBeenCalled(); }); - const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, parentWorkflow.id); - - await expect(check).rejects.toThrow(); - }); - }); + it('should throw when the user does not have access to the credential', async () => { + sharedCredentialsRepo.getOwnedCredentialIds.mockResolvedValue(['cred-id2']); - describe('any caller policy', () => { - test('should not throw', async () => { - const parentWorkflow = createParentWorkflow(); - const subworkflow = createSubworkflow({ policy: 'any' }); - ownershipService.getWorkflowOwnerCached.mockResolvedValue(new User()); + await expect(permissionChecker.check(workflow, user.id)).rejects.toThrow( + 'Node has no access to credential', + ); - const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, parentWorkflow.id); - - await expect(check).resolves.not.toThrow(); + expect(sharedWorkflowRepo.getSharedUserIds).not.toBeCalled(); + expect(sharedCredentialsRepo.getOwnedCredentialIds).toBeCalledWith([user.id]); + expect(sharedCredentialsRepo.getAccessibleCredentialIds).not.toHaveBeenCalled(); + }); }); - }); - - describe('workflows-from-same-owner caller policy', () => { - test('should deny if the two workflows are owned by different users', async () => { - const parentWorkflowOwner = Container.get(UserRepository).create({ id: uuid() }); - const subworkflowOwner = Container.get(UserRepository).create({ id: uuid() }); - - 
ownershipService.getWorkflowOwnerCached.mockResolvedValueOnce(parentWorkflowOwner); // parent workflow - ownershipService.getWorkflowOwnerCached.mockResolvedValueOnce(subworkflowOwner); // subworkflow - - const subworkflow = createSubworkflow({ policy: 'workflowsFromSameOwner' }); - const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, uuid()); - - await expect(check).rejects.toThrow(toTargetCallErrorMsg(subworkflow.id)); - }); + describe('When sharing is enabled', () => { + beforeEach(() => { + userRepo.findOneOrFail.mockResolvedValue(user); + user.hasGlobalScope.calledWith('workflow:execute').mockReturnValue(false); + license.isSharingEnabled.mockReturnValue(true); + sharedWorkflowRepo.getSharedUserIds.mockResolvedValue([user.id, 'another-user']); + }); - test('should allow if both workflows are owned by the same user', async () => { - const parentWorkflow = createParentWorkflow(); + it('should validate credential access using only owned credentials', async () => { + sharedCredentialsRepo.getAccessibleCredentialIds.mockResolvedValue(['cred-id']); - const bothWorkflowsOwner = Container.get(UserRepository).create({ id: uuid() }); + await expect(permissionChecker.check(workflow, user.id)).resolves.not.toThrow(); - ownershipService.getWorkflowOwnerCached.mockResolvedValueOnce(bothWorkflowsOwner); // parent workflow - ownershipService.getWorkflowOwnerCached.mockResolvedValueOnce(bothWorkflowsOwner); // subworkflow + expect(sharedWorkflowRepo.getSharedUserIds).toBeCalledWith(workflow.id); + expect(sharedCredentialsRepo.getAccessibleCredentialIds).toBeCalledWith([ + user.id, + 'another-user', + ]); + expect(sharedCredentialsRepo.getOwnedCredentialIds).not.toHaveBeenCalled(); + }); - const subworkflow = createSubworkflow({ policy: 'workflowsFromSameOwner' }); + it('should throw when the user does not have access to the credential', async () => { + sharedCredentialsRepo.getAccessibleCredentialIds.mockResolvedValue(['cred-id2']); - const check = permissionChecker.checkSubworkflowExecutePolicy(subworkflow, parentWorkflow.id); + await expect(permissionChecker.check(workflow, user.id)).rejects.toThrow( + 'Node has no access to credential', + ); - await expect(check).resolves.not.toThrow(); + expect(sharedWorkflowRepo.find).not.toBeCalled(); + expect(sharedCredentialsRepo.getAccessibleCredentialIds).toBeCalledWith([ + user.id, + 'another-user', + ]); + expect(sharedCredentialsRepo.getOwnedCredentialIds).not.toHaveBeenCalled(); + }); }); }); }); diff --git a/packages/cli/test/unit/repositories/execution.repository.test.ts b/packages/cli/test/unit/repositories/execution.repository.test.ts index e101e77557204..57a223df25e2d 100644 --- a/packages/cli/test/unit/repositories/execution.repository.test.ts +++ b/packages/cli/test/unit/repositories/execution.repository.test.ts @@ -1,7 +1,7 @@ import { mock } from 'jest-mock-extended'; import Container from 'typedi'; -import type { EntityMetadata } from 'typeorm'; -import { EntityManager, DataSource, Not, LessThanOrEqual } from 'typeorm'; +import type { EntityMetadata } from '@n8n/typeorm'; +import { EntityManager, DataSource, Not, LessThanOrEqual } from '@n8n/typeorm'; import config from '@/config'; import { ExecutionEntity } from '@db/entities/ExecutionEntity'; diff --git a/packages/cli/test/unit/repositories/sharedCredentials.repository.test.ts b/packages/cli/test/unit/repositories/sharedCredentials.repository.test.ts index f14c2ee3548b4..4a808bfb35da6 100644 --- a/packages/cli/test/unit/repositories/sharedCredentials.repository.test.ts 
+++ b/packages/cli/test/unit/repositories/sharedCredentials.repository.test.ts @@ -1,5 +1,5 @@ import { Container } from 'typedi'; -import { DataSource, EntityManager, type EntityMetadata } from 'typeorm'; +import { DataSource, EntityManager, type EntityMetadata } from '@n8n/typeorm'; import { mock } from 'jest-mock-extended'; import type { User } from '@db/entities/User'; import type { CredentialsEntity } from '@db/entities/CredentialsEntity'; diff --git a/packages/cli/test/unit/repositories/workflowStatistics.test.ts b/packages/cli/test/unit/repositories/workflowStatistics.test.ts index 7aa0280600aab..ea56b2d84c8ed 100644 --- a/packages/cli/test/unit/repositories/workflowStatistics.test.ts +++ b/packages/cli/test/unit/repositories/workflowStatistics.test.ts @@ -1,5 +1,5 @@ import { WorkflowStatisticsRepository } from '@db/repositories/workflowStatistics.repository'; -import { DataSource, EntityManager, InsertResult, QueryFailedError } from 'typeorm'; +import { DataSource, EntityManager, InsertResult, QueryFailedError } from '@n8n/typeorm'; import { mockInstance } from '../../shared/mocking'; import { mock, mockClear } from 'jest-mock-extended'; import { StatisticsNames, WorkflowStatistics } from '@/databases/entities/WorkflowStatistics'; diff --git a/packages/cli/test/unit/services/events.service.test.ts b/packages/cli/test/unit/services/events.service.test.ts index 6504b691e80dd..afdd4091d3186 100644 --- a/packages/cli/test/unit/services/events.service.test.ts +++ b/packages/cli/test/unit/services/events.service.test.ts @@ -4,7 +4,7 @@ import { type DataSource, type EntityManager, type EntityMetadata, -} from 'typeorm'; +} from '@n8n/typeorm'; import { mocked } from 'jest-mock'; import { mock } from 'jest-mock-extended'; diff --git a/packages/cli/test/unit/services/orchestration.service.test.ts b/packages/cli/test/unit/services/orchestration.service.test.ts index 2dfd1519a6c2b..d755c73fcb17b 100644 --- a/packages/cli/test/unit/services/orchestration.service.test.ts +++ b/packages/cli/test/unit/services/orchestration.service.test.ts @@ -2,7 +2,7 @@ import Container from 'typedi'; import config from '@/config'; import { OrchestrationService } from '@/services/orchestration.service'; import type { RedisServiceWorkerResponseObject } from '@/services/redis/RedisServiceCommands'; -import { MessageEventBus } from '@/eventbus'; +import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus'; import { RedisService } from '@/services/redis.service'; import { handleWorkerResponseMessageMain } from '@/services/orchestration/main/handleWorkerResponseMessageMain'; import { handleCommandMessageMain } from '@/services/orchestration/main/handleCommandMessageMain'; diff --git a/packages/core/package.json b/packages/core/package.json index 69cdc05f9a1da..5524aba8987f0 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "n8n-core", - "version": "1.26.0", + "version": "1.28.0", "description": "Core functionality of n8n", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", diff --git a/packages/core/src/Interfaces.ts b/packages/core/src/Interfaces.ts index 40e5d88eee6a8..000f2a8125b07 100644 --- a/packages/core/src/Interfaces.ts +++ b/packages/core/src/Interfaces.ts @@ -7,12 +7,6 @@ import type { export type Class = new (...args: A) => T; -export interface IProcessMessage { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - data?: any; - type: string; -} - export interface IResponseError extends Error { 
statusCode?: number; } diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index 6079fea6347a2..99fe4bf28006d 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -31,7 +31,7 @@ import FormData from 'form-data'; import { createReadStream } from 'fs'; import { access as fsAccess, writeFile as fsWriteFile } from 'fs/promises'; import { IncomingMessage, type IncomingHttpHeaders } from 'http'; -import { Agent } from 'https'; +import { Agent, type AgentOptions } from 'https'; import get from 'lodash/get'; import pick from 'lodash/pick'; import { extension, lookup } from 'mime-types'; @@ -229,7 +229,22 @@ async function generateContentLengthHeader(config: AxiosRequestConfig) { } } -async function parseRequestObject(requestObject: IDataObject) { +const getHostFromRequestObject = ( + requestObject: Partial<{ + url: string; + uri: string; + baseURL: string; + }>, +): string | null => { + try { + const url = (requestObject.url ?? requestObject.uri) as string; + return new URL(url, requestObject.baseURL).hostname; + } catch (error) { + return null; + } +}; + +export async function parseRequestObject(requestObject: IDataObject) { // This function is a temporary implementation // That translates all http requests done via // the request library to axios directly @@ -452,6 +467,15 @@ async function parseRequestObject(requestObject: IDataObject) { axiosConfig.maxRedirects = 0; } + axiosConfig.beforeRedirect = (redirectedRequest) => { + if (axiosConfig.headers?.Authorization) { + redirectedRequest.headers.Authorization = axiosConfig.headers.Authorization; + } + if (axiosConfig.auth) { + redirectedRequest.auth = `${axiosConfig.auth.username}:${axiosConfig.auth.password}`; + } + }; + if (requestObject.rejectUnauthorized === false) { axiosConfig.httpsAgent = new Agent({ rejectUnauthorized: false, @@ -459,6 +483,17 @@ async function parseRequestObject(requestObject: IDataObject) { }); } + const host = getHostFromRequestObject(requestObject); + const agentOptions: AgentOptions = {}; + if (host) { + agentOptions.servername = host; + } + if (requestObject.rejectUnauthorized === false) { + agentOptions.rejectUnauthorized = false; + agentOptions.secureOptions = crypto.constants.SSL_OP_LEGACY_SERVER_CONNECT; + } + axiosConfig.httpsAgent = new Agent(agentOptions); + if (requestObject.timeout !== undefined) { axiosConfig.timeout = requestObject.timeout as number; } @@ -723,14 +758,11 @@ export async function proxyRequestToAxios( maxBodyLength: Infinity, maxContentLength: Infinity, }; - let configObject: ConfigObject; - if (uriOrObject !== undefined && typeof uriOrObject === 'string') { - axiosConfig.url = uriOrObject; - } - if (uriOrObject !== undefined && typeof uriOrObject === 'object') { - configObject = uriOrObject; + let configObject: ConfigObject & { uri?: string }; + if (typeof uriOrObject === 'string') { + configObject = { uri: uriOrObject, ...options }; } else { - configObject = options || {}; + configObject = uriOrObject ?? 
{}; } axiosConfig = Object.assign(axiosConfig, await parseRequestObject(configObject)); @@ -850,11 +882,15 @@ function convertN8nRequestToAxios(n8nRequest: IHttpRequestOptions): AxiosRequest axiosRequest.responseType = n8nRequest.encoding; } + const host = getHostFromRequestObject(n8nRequest); + const agentOptions: AgentOptions = {}; + if (host) { + agentOptions.servername = host; + } if (n8nRequest.skipSslCertificateValidation === true) { - axiosRequest.httpsAgent = new Agent({ - rejectUnauthorized: false, - }); + agentOptions.rejectUnauthorized = false; } + axiosRequest.httpsAgent = new Agent(agentOptions); if (n8nRequest.arrayFormat !== undefined) { axiosRequest.paramsSerializer = (params) => { @@ -1199,6 +1235,23 @@ async function prepareBinaryData( return await setBinaryDataBuffer(returnData, binaryData, workflowId, executionId); } +function applyPaginationRequestData( + requestData: OptionsWithUri, + paginationRequestData: PaginationOptions['request'], +): OptionsWithUri { + const preparedPaginationData: Partial = { ...paginationRequestData }; + + if ('formData' in requestData) { + preparedPaginationData.formData = paginationRequestData.body; + delete preparedPaginationData.body; + } else if ('form' in requestData) { + preparedPaginationData.form = paginationRequestData.body; + delete preparedPaginationData.body; + } + + return merge({}, requestData, preparedPaginationData); +} + /** * Makes a request using OAuth data for authentication * @@ -1714,6 +1767,7 @@ export async function requestWithAuthentication( node: INode, additionalData: IWorkflowExecuteAdditionalData, additionalCredentialOptions?: IAdditionalCredentialOptions, + itemIndex?: number, ) { let credentialsDecrypted: ICredentialDataDecryptedObject | undefined; @@ -1738,7 +1792,7 @@ export async function requestWithAuthentication( if (additionalCredentialOptions?.credentialsDecrypted) { credentialsDecrypted = additionalCredentialOptions.credentialsDecrypted.data; } else { - credentialsDecrypted = await this.getCredentials(credentialsType); + credentialsDecrypted = await this.getCredentials(credentialsType, itemIndex); } if (credentialsDecrypted === undefined) { @@ -2494,7 +2548,7 @@ const addExecutionDataFunctions = async ( runExecutionData.executionData!.metadata = {}; } - let sourceTaskData = get(runExecutionData, `executionData.metadata[${sourceNodeName}]`); + let sourceTaskData = get(runExecutionData, ['executionData', 'metadata', sourceNodeName]); if (!sourceTaskData) { runExecutionData.executionData!.metadata[sourceNodeName] = []; @@ -2796,7 +2850,7 @@ const getRequestHelperFunctions = ( let tempResponseData: IN8nHttpFullResponse; let makeAdditionalRequest: boolean; - let paginateRequestData: IHttpRequestOptions; + let paginateRequestData: PaginationOptions['request']; const runIndex = 0; @@ -2826,9 +2880,9 @@ const getRequestHelperFunctions = ( executeData, additionalKeys, false, - ) as object as IHttpRequestOptions; + ) as object as PaginationOptions['request']; - const tempRequestOptions = merge(requestOptions, paginateRequestData); + const tempRequestOptions = applyPaginationRequestData(requestOptions, paginateRequestData); if (credentialsType) { tempResponseData = await this.helpers.requestWithAuthentication.call( @@ -3000,6 +3054,7 @@ const getRequestHelperFunctions = ( credentialsType, requestOptions, additionalCredentialOptions, + itemIndex, ): Promise { return await requestWithAuthentication.call( this, @@ -3009,6 +3064,7 @@ const getRequestHelperFunctions = ( node, additionalData, 
additionalCredentialOptions, + itemIndex, ); }, diff --git a/packages/core/src/WorkflowExecute.ts b/packages/core/src/WorkflowExecute.ts index a225515299121..b768911f7f8de 100644 --- a/packages/core/src/WorkflowExecute.ts +++ b/packages/core/src/WorkflowExecute.ts @@ -338,10 +338,13 @@ export class WorkflowExecute { ): boolean { // for (const inputConnection of workflow.connectionsByDestinationNode[nodeToAdd].main[0]) { for (const inputConnection of inputConnections) { - const nodeIncomingData = get( - runData, - `[${inputConnection.node}][${runIndex}].data.main[${inputConnection.index}]`, - ); + const nodeIncomingData = get(runData, [ + inputConnection.node, + runIndex, + 'data', + 'main', + inputConnection.index, + ]); if (nodeIncomingData !== undefined && (nodeIncomingData as object[]).length !== 0) { return false; } diff --git a/packages/core/test/NodeExecuteFunctions.test.ts b/packages/core/test/NodeExecuteFunctions.test.ts index 6b4d62cb26f37..7f8fe4296c5a0 100644 --- a/packages/core/test/NodeExecuteFunctions.test.ts +++ b/packages/core/test/NodeExecuteFunctions.test.ts @@ -2,6 +2,7 @@ import { copyInputItems, getBinaryDataBuffer, parseIncomingMessage, + parseRequestObject, proxyRequestToAxios, setBinaryDataBuffer, } from '@/NodeExecuteFunctions'; @@ -21,6 +22,7 @@ import nock from 'nock'; import { tmpdir } from 'os'; import { join } from 'path'; import Container from 'typedi'; +import type { Agent } from 'https'; const temporaryDir = mkdtempSync(join(tmpdir(), 'n8n')); @@ -295,6 +297,77 @@ describe('NodeExecuteFunctions', () => { node, ]); }); + + describe('redirects', () => { + test('should forward authorization header', async () => { + nock(baseUrl).get('/redirect').reply(301, '', { Location: 'https://otherdomain.com/test' }); + nock('https://otherdomain.com') + .get('/test') + .reply(200, function () { + return this.req.headers; + }); + + const response = await proxyRequestToAxios(workflow, additionalData, node, { + url: `${baseUrl}/redirect`, + auth: { + username: 'testuser', + password: 'testpassword', + }, + headers: { + 'X-Other-Header': 'otherHeaderContent', + }, + resolveWithFullResponse: true, + }); + + expect(response.statusCode).toBe(200); + const forwardedHeaders = JSON.parse(response.body); + expect(forwardedHeaders.authorization).toBe('Basic dGVzdHVzZXI6dGVzdHBhc3N3b3Jk'); + expect(forwardedHeaders['x-other-header']).toBe('otherHeaderContent'); + }); + + test('should follow redirects by default', async () => { + nock(baseUrl) + .get('/redirect') + .reply(301, '', { Location: `${baseUrl}/test` }); + nock(baseUrl).get('/test').reply(200, 'Redirected'); + + const response = await proxyRequestToAxios(workflow, additionalData, node, { + url: `${baseUrl}/redirect`, + resolveWithFullResponse: true, + }); + + expect(response).toMatchObject({ + body: 'Redirected', + headers: {}, + statusCode: 200, + }); + }); + + test('should not follow redirects when configured', async () => { + nock(baseUrl) + .get('/redirect') + .reply(301, '', { Location: `${baseUrl}/test` }); + nock(baseUrl).get('/test').reply(200, 'Redirected'); + + await expect( + proxyRequestToAxios(workflow, additionalData, node, { + url: `${baseUrl}/redirect`, + resolveWithFullResponse: true, + followRedirect: false, + }), + ).rejects.toThrowError(expect.objectContaining({ statusCode: 301 })); + }); + }); + }); + + describe('parseRequestObject', () => { + test('should not use Host header for SNI', async () => { + const axiosOptions = await parseRequestObject({ + url: 'https://example.de/foo/bar', + headers: { Host: 
'other.host.com' }, + }); + expect((axiosOptions.httpsAgent as Agent).options.servername).toEqual('example.de'); + }); }); describe('copyInputItems', () => { diff --git a/packages/design-system/package.json b/packages/design-system/package.json index 2d1a325af81f4..2a78cddc40f13 100644 --- a/packages/design-system/package.json +++ b/packages/design-system/package.json @@ -1,6 +1,6 @@ { "name": "n8n-design-system", - "version": "1.20.0", + "version": "1.21.0", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", "author": { diff --git a/packages/design-system/src/css/_primitives.scss b/packages/design-system/src/css/_primitives.scss index d45b5785303fd..77e3a4e30ca4f 100644 --- a/packages/design-system/src/css/_primitives.scss +++ b/packages/design-system/src/css/_primitives.scss @@ -46,6 +46,12 @@ var(--prim-color-primary-s), var(--prim-color-primary-l) ); + --prim-color-primary-alpha-010: hsla( + var(--prim-color-primary-h), + var(--prim-color-primary-s), + var(--prim-color-primary-l), + 0.1 + ); --prim-color-primary-tint-100: hsl( var(--prim-color-primary-h), var(--prim-color-primary-s), @@ -93,6 +99,12 @@ var(--prim-color-secondary-l), 0.25 ); + --prim-color-secondary-alpha-010: hsla( + var(--prim-color-secondary-h), + var(--prim-color-secondary-s), + var(--prim-color-secondary-l), + 0.1 + ); --prim-color-secondary-tint-100: hsl( var(--prim-color-secondary-h), var(--prim-color-secondary-s), @@ -140,6 +152,12 @@ var(--prim-color-alt-a-l), 0.25 ); + --prim-color-alt-a-alpha-015: hsl( + var(--prim-color-alt-a-h), + var(--prim-color-alt-a-s), + var(--prim-color-alt-a-l), + 0.15 + ); --prim-color-alt-a-tint-300: hsl( var(--prim-color-alt-a-h), var(--prim-color-alt-a-s), diff --git a/packages/design-system/src/css/_tokens.dark.scss b/packages/design-system/src/css/_tokens.dark.scss index 83705187de5ba..6f85ffb752237 100644 --- a/packages/design-system/src/css/_tokens.dark.scss +++ b/packages/design-system/src/css/_tokens.dark.scss @@ -131,6 +131,8 @@ // NDV --color-run-data-background: var(--prim-gray-800); --color-ndv-droppable-parameter: var(--prim-color-primary); + --color-ndv-droppable-parameter-background: var(--prim-color-primary-alpha-010); + --color-ndv-droppable-parameter-active-background: var(--prim-color-alt-a-alpha-015); --color-ndv-back-font: var(--prim-gray-0); --color-ndv-ouptut-error-font: var(--prim-color-alt-c-tint-150); @@ -174,6 +176,9 @@ // Action Dropdown --color-action-dropdown-item-active-background: var(--color-background-xlight); + // Input Triple + --color-background-input-triple: var(--prim-gray-800); + // Various --color-info-tint-1: var(--prim-gray-420); --color-info-tint-2: var(--prim-gray-740); diff --git a/packages/design-system/src/css/_tokens.scss b/packages/design-system/src/css/_tokens.scss index 113add69eef8d..3c20089511182 100644 --- a/packages/design-system/src/css/_tokens.scss +++ b/packages/design-system/src/css/_tokens.scss @@ -201,6 +201,8 @@ // NDV --color-run-data-background: var(--color-background-base); --color-ndv-droppable-parameter: var(--color-secondary); + --color-ndv-droppable-parameter-background: var(--prim-color-secondary-alpha-010); + --color-ndv-droppable-parameter-active-background: var(--prim-color-alt-a-alpha-015); --color-ndv-back-font: var(--prim-gray-0); --color-ndv-ouptut-error-font: var(--prim-color-alt-c); @@ -253,6 +255,9 @@ // Feature Request --color-feature-request-font: var(--prim-gray-0); + // Input Triple + --color-background-input-triple: var(--color-background-light); + // Various 
--color-avatar-accent-1: var(--prim-gray-120); --color-avatar-accent-2: var(--prim-color-alt-e-shade-100); diff --git a/packages/design-system/src/css/input-number.scss b/packages/design-system/src/css/input-number.scss index 0fef6a703ff94..e39c66c09081b 100644 --- a/packages/design-system/src/css/input-number.scss +++ b/packages/design-system/src/css/input-number.scss @@ -96,7 +96,7 @@ } @include mixins.m(small) { - line-height: #{var.$input-small-height - 2}; + line-height: #{var.$input-small-height - 4}; @include mixins.e((increase, decrease)) { width: var.$input-small-height; diff --git a/packages/editor-ui/package.json b/packages/editor-ui/package.json index fd322f5bcce4c..679ef383b9876 100644 --- a/packages/editor-ui/package.json +++ b/packages/editor-ui/package.json @@ -1,6 +1,6 @@ { "name": "n8n-editor-ui", - "version": "1.26.0", + "version": "1.28.0", "description": "Workflow Editor UI for n8n", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://n8n.io", diff --git a/packages/editor-ui/src/Interface.ts b/packages/editor-ui/src/Interface.ts index 8084fdbd08901..6fd65e1cf7a2f 100644 --- a/packages/editor-ui/src/Interface.ts +++ b/packages/editor-ui/src/Interface.ts @@ -1235,8 +1235,8 @@ export interface NDVState { isDragging: boolean; type: string; data: string; - activeTargetId: string | null; - stickyPosition: null | XYPosition; + dimensions: DOMRect | null; + activeTarget: { id: string; stickyPosition: null | XYPosition } | null; }; isMappingOnboarded: boolean; } diff --git a/packages/editor-ui/src/components/AssignmentCollection/Assignment.vue b/packages/editor-ui/src/components/AssignmentCollection/Assignment.vue new file mode 100644 index 0000000000000..1d5d166fba036 --- /dev/null +++ b/packages/editor-ui/src/components/AssignmentCollection/Assignment.vue @@ -0,0 +1,264 @@ + + + + + diff --git a/packages/editor-ui/src/components/AssignmentCollection/AssignmentCollection.vue b/packages/editor-ui/src/components/AssignmentCollection/AssignmentCollection.vue new file mode 100644 index 0000000000000..7336684f6c8a7 --- /dev/null +++ b/packages/editor-ui/src/components/AssignmentCollection/AssignmentCollection.vue @@ -0,0 +1,270 @@ + + + + + diff --git a/packages/editor-ui/src/components/AssignmentCollection/TypeSelect.vue b/packages/editor-ui/src/components/AssignmentCollection/TypeSelect.vue new file mode 100644 index 0000000000000..2a646a064fa1e --- /dev/null +++ b/packages/editor-ui/src/components/AssignmentCollection/TypeSelect.vue @@ -0,0 +1,68 @@ + + + + + diff --git a/packages/editor-ui/src/components/AssignmentCollection/__tests__/Assignment.test.ts b/packages/editor-ui/src/components/AssignmentCollection/__tests__/Assignment.test.ts new file mode 100644 index 0000000000000..6d2f438afe855 --- /dev/null +++ b/packages/editor-ui/src/components/AssignmentCollection/__tests__/Assignment.test.ts @@ -0,0 +1,54 @@ +import { createComponentRenderer } from '@/__tests__/render'; +import { createTestingPinia } from '@pinia/testing'; +import userEvent from '@testing-library/user-event'; +import Assignment from '../Assignment.vue'; + +const DEFAULT_SETUP = { + pinia: createTestingPinia(), + props: { + path: 'parameters.fields.0', + modelValue: { + name: '', + type: 'string', + value: '', + }, + issues: [], + }, +}; + +const renderComponent = createComponentRenderer(Assignment, DEFAULT_SETUP); + +describe('Assignment.vue', () => { + afterEach(() => { + vi.clearAllMocks(); + }); + + it('can edit name, type and value', async () => { + const { getByTestId, baseElement, emitted } = 
renderComponent(); + + const nameField = getByTestId('assignment-name').querySelector('input') as HTMLInputElement; + const valueField = getByTestId('assignment-value').querySelector('input') as HTMLInputElement; + + expect(getByTestId('assignment')).toBeInTheDocument(); + expect(getByTestId('assignment-name')).toBeInTheDocument(); + expect(getByTestId('assignment-value')).toBeInTheDocument(); + expect(getByTestId('assignment-type-select')).toBeInTheDocument(); + + await userEvent.type(nameField, 'New name'); + await userEvent.type(valueField, 'New value'); + + await userEvent.click(baseElement.querySelectorAll('.option')[3]); + + expect(emitted('update:model-value')[0]).toEqual([ + { name: 'New name', type: 'array', value: 'New value' }, + ]); + }); + + it('can remove itself', async () => { + const { getByTestId, emitted } = renderComponent(); + + await userEvent.click(getByTestId('assignment-remove')); + + expect(emitted('remove')).toEqual([[]]); + }); +}); diff --git a/packages/editor-ui/src/components/AssignmentCollection/__tests__/AssignmentCollection.test.ts b/packages/editor-ui/src/components/AssignmentCollection/__tests__/AssignmentCollection.test.ts new file mode 100644 index 0000000000000..fb425a6bc0617 --- /dev/null +++ b/packages/editor-ui/src/components/AssignmentCollection/__tests__/AssignmentCollection.test.ts @@ -0,0 +1,121 @@ +import { createComponentRenderer } from '@/__tests__/render'; +import { useNDVStore } from '@/stores/ndv.store'; +import { createTestingPinia } from '@pinia/testing'; +import userEvent from '@testing-library/user-event'; +import { fireEvent, within } from '@testing-library/vue'; +import * as workflowHelpers from '@/mixins/workflowHelpers'; +import AssignmentCollection from '../AssignmentCollection.vue'; +import { createPinia, setActivePinia } from 'pinia'; + +const DEFAULT_SETUP = { + pinia: createTestingPinia(), + props: { + path: 'parameters.fields', + node: { + parameters: {}, + id: 'f63efb2d-3cc5-4500-89f9-b39aab19baf5', + name: 'Edit Fields', + type: 'n8n-nodes-base.set', + typeVersion: 3.3, + position: [1120, 380], + credentials: {}, + disabled: false, + }, + parameter: { name: 'fields', displayName: 'Fields To Set' }, + value: {}, + }, +}; + +const renderComponent = createComponentRenderer(AssignmentCollection, DEFAULT_SETUP); + +const getInput = (e: HTMLElement): HTMLInputElement => { + return e.querySelector('input') as HTMLInputElement; +}; + +const getAssignmentType = (assignment: HTMLElement): string => { + return getInput(within(assignment).getByTestId('assignment-type-select')).value; +}; + +async function dropAssignment({ + key, + value, + dropArea, +}: { + key: string; + value: unknown; + dropArea: HTMLElement; +}): Promise { + useNDVStore().draggableStartDragging({ + type: 'mapping', + data: `{{ $json.${key} }}`, + dimensions: null, + }); + + vitest.spyOn(workflowHelpers, 'resolveParameter').mockReturnValueOnce(value as never); + + await userEvent.hover(dropArea); + await fireEvent.mouseUp(dropArea); +} + +describe('AssignmentCollection.vue', () => { + afterEach(() => { + vi.clearAllMocks(); + }); + + it('renders empty state properly', async () => { + const { getByTestId, queryByTestId } = renderComponent(); + expect(getByTestId('assignment-collection-fields')).toBeInTheDocument(); + expect(getByTestId('assignment-collection-fields')).toHaveClass('empty'); + expect(getByTestId('assignment-collection-drop-area')).toHaveTextContent( + 'Drag input fields here', + ); + expect(queryByTestId('assignment')).not.toBeInTheDocument(); + }); + 
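// The drag-and-drop test further down leans on the type inference added in
// AssignmentCollection/utils.ts later in this diff; a small sketch of how dropped values
// map to assignment types (mirroring inferAssignmentType, which falls back to 'string'
// for anything unrecognised):
import { inferAssignmentType } from '../utils';

const inferred = [true, 'stringValue', 25, {}, []].map(inferAssignmentType);
// ['boolean', 'string', 'number', 'object', 'array'] — the same order the test asserts,
// with the type selector displaying the capitalised label for each value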
+ it('can add and remove assignments', async () => { + const { getByTestId, findAllByTestId } = renderComponent(); + + await userEvent.click(getByTestId('assignment-collection-drop-area')); + await userEvent.click(getByTestId('assignment-collection-drop-area')); + + let assignments = await findAllByTestId('assignment'); + + expect(assignments.length).toEqual(2); + + await userEvent.type(getInput(within(assignments[1]).getByTestId('assignment-name')), 'second'); + await userEvent.type( + getInput(within(assignments[1]).getByTestId('assignment-value')), + 'secondValue', + ); + await userEvent.click(within(assignments[0]).getByTestId('assignment-remove')); + + assignments = await findAllByTestId('assignment'); + expect(assignments.length).toEqual(1); + expect(getInput(within(assignments[0]).getByTestId('assignment-value'))).toHaveValue( + 'secondValue', + ); + }); + + it('can add assignments by drag and drop (and infer type)', async () => { + const pinia = createPinia(); + setActivePinia(pinia); + + const { getByTestId, findAllByTestId } = renderComponent({ pinia }); + const dropArea = getByTestId('assignment-collection-drop-area'); + + await dropAssignment({ key: 'boolKey', value: true, dropArea }); + await dropAssignment({ key: 'stringKey', value: 'stringValue', dropArea }); + await dropAssignment({ key: 'numberKey', value: 25, dropArea }); + await dropAssignment({ key: 'objectKey', value: {}, dropArea }); + await dropAssignment({ key: 'arrayKey', value: [], dropArea }); + + let assignments = await findAllByTestId('assignment'); + + expect(assignments.length).toBe(5); + expect(getAssignmentType(assignments[0])).toEqual('Boolean'); + expect(getAssignmentType(assignments[1])).toEqual('String'); + expect(getAssignmentType(assignments[2])).toEqual('Number'); + expect(getAssignmentType(assignments[3])).toEqual('Object'); + expect(getAssignmentType(assignments[4])).toEqual('Array'); + }); +}); diff --git a/packages/editor-ui/src/components/AssignmentCollection/__tests__/TypeSelect.test.ts b/packages/editor-ui/src/components/AssignmentCollection/__tests__/TypeSelect.test.ts new file mode 100644 index 0000000000000..78f4b683c6a85 --- /dev/null +++ b/packages/editor-ui/src/components/AssignmentCollection/__tests__/TypeSelect.test.ts @@ -0,0 +1,41 @@ +import { createComponentRenderer } from '@/__tests__/render'; +import { createTestingPinia } from '@pinia/testing'; +import userEvent from '@testing-library/user-event'; +import TypeSelect from '../TypeSelect.vue'; + +const DEFAULT_SETUP = { + pinia: createTestingPinia(), + props: { + modelValue: 'boolean', + }, +}; + +const renderComponent = createComponentRenderer(TypeSelect, DEFAULT_SETUP); + +describe('TypeSelect.vue', () => { + afterEach(() => { + vi.clearAllMocks(); + }); + + it('renders default state correctly and emit events', async () => { + const { getByTestId, baseElement, emitted } = renderComponent(); + expect(getByTestId('assignment-type-select')).toBeInTheDocument(); + + await userEvent.click( + getByTestId('assignment-type-select').querySelector('.select-trigger') as HTMLElement, + ); + + const options = baseElement.querySelectorAll('.option'); + expect(options.length).toEqual(5); + + expect(options[0]).toHaveTextContent('String'); + expect(options[1]).toHaveTextContent('Number'); + expect(options[2]).toHaveTextContent('Boolean'); + expect(options[3]).toHaveTextContent('Array'); + expect(options[4]).toHaveTextContent('Object'); + + await userEvent.click(options[2]); + + expect(emitted('update:model-value')).toEqual([['boolean']]); + }); 
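// The five options asserted above come from ASSIGNMENT_TYPES in
// AssignmentCollection/constants.ts (added below); a sketch of the assumed label mapping —
// the dropdown shows a capitalised label per entry while the emitted update:model-value
// payload stays the lower-case `type` string:
import { ASSIGNMENT_TYPES } from '../constants';

const optionLabels = ASSIGNMENT_TYPES.map(
	({ type }) => type.charAt(0).toUpperCase() + type.slice(1),
);
// ['String', 'Number', 'Boolean', 'Array', 'Object'] — clicking 'Boolean' (index 2)
// therefore emits 'boolean', as the test expects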
+}); diff --git a/packages/editor-ui/src/components/AssignmentCollection/constants.ts b/packages/editor-ui/src/components/AssignmentCollection/constants.ts new file mode 100644 index 0000000000000..c448f96ce8dfa --- /dev/null +++ b/packages/editor-ui/src/components/AssignmentCollection/constants.ts @@ -0,0 +1,7 @@ +export const ASSIGNMENT_TYPES = [ + { type: 'string', icon: 'font' }, + { type: 'number', icon: 'hashtag' }, + { type: 'boolean', icon: 'check-square' }, + { type: 'array', icon: 'list' }, + { type: 'object', icon: 'cube' }, +]; diff --git a/packages/editor-ui/src/components/AssignmentCollection/utils.ts b/packages/editor-ui/src/components/AssignmentCollection/utils.ts new file mode 100644 index 0000000000000..d4845aa59839e --- /dev/null +++ b/packages/editor-ui/src/components/AssignmentCollection/utils.ts @@ -0,0 +1,61 @@ +import { isObject } from 'lodash-es'; +import type { AssignmentValue, IDataObject } from 'n8n-workflow'; +import { resolveParameter } from '@/mixins/workflowHelpers'; +import { v4 as uuid } from 'uuid'; + +export function nameFromExpression(expression: string): string { + return expression.replace(/^{{\s*|\s*}}$/g, '').replace('$json.', ''); +} + +export function inferAssignmentType(value: unknown): string { + if (typeof value === 'boolean') return 'boolean'; + if (typeof value === 'number') return 'number'; + if (typeof value === 'string') return 'string'; + if (Array.isArray(value)) return 'array'; + if (isObject(value)) return 'object'; + return 'string'; +} + +export function typeFromExpression(expression: string): string { + try { + const resolved = resolveParameter(`=${expression}`); + return inferAssignmentType(resolved); + } catch (error) { + return 'string'; + } +} + +export function inputDataToAssignments(input: IDataObject): AssignmentValue[] { + const assignments: AssignmentValue[] = []; + + function processValue(value: IDataObject, path: Array<string | number> = []) { + if (Array.isArray(value)) { + value.forEach((element, index) => { + processValue(element, [...path, index]); + }); + } else if (isObject(value)) { + for (const [key, objectValue] of Object.entries(value)) { + processValue(objectValue as IDataObject, [...path, key]); + } + } else { + const stringPath = path.reduce((fullPath: string, part) => { + if (typeof part === 'number') { + return `${fullPath}[${part}]`; + } + return `${fullPath}.${part}`; + }, '$json'); + + const expression = `={{ ${stringPath} }}`; + assignments.push({ + id: uuid(), + name: stringPath.replace('$json.', ''), + value: expression, + type: inferAssignmentType(value), + }); + } + } + + processValue(input); + + return assignments; +} diff --git a/packages/editor-ui/src/components/CollectionParameter.vue b/packages/editor-ui/src/components/CollectionParameter.vue index 913271d28f379..16b5ac6b8243f 100644 --- a/packages/editor-ui/src/components/CollectionParameter.vue +++ b/packages/editor-ui/src/components/CollectionParameter.vue @@ -49,7 +49,7 @@ diff --git a/packages/editor-ui/src/components/DropArea/DropArea.vue b/packages/editor-ui/src/components/DropArea/DropArea.vue new file mode 100644 index 0000000000000..a6129983a77da --- /dev/null +++ b/packages/editor-ui/src/components/DropArea/DropArea.vue @@ -0,0 +1,53 @@ + + + + + diff --git a/packages/editor-ui/src/components/DropArea/__tests__/DropArea.test.ts b/packages/editor-ui/src/components/DropArea/__tests__/DropArea.test.ts new file mode 100644 index 0000000000000..d9c8371d72479 --- /dev/null +++ b/packages/editor-ui/src/components/DropArea/__tests__/DropArea.test.ts @@ -0,0
+1,40 @@ +import { createComponentRenderer } from '@/__tests__/render'; +import { useNDVStore } from '@/stores/ndv.store'; +import { createTestingPinia } from '@pinia/testing'; +import userEvent from '@testing-library/user-event'; +import { fireEvent } from '@testing-library/vue'; +import { createPinia, setActivePinia } from 'pinia'; +import DropArea from '../DropArea.vue'; + +const renderComponent = createComponentRenderer(DropArea, { + pinia: createTestingPinia(), +}); + +async function fireDrop(dropArea: HTMLElement): Promise<void> { + useNDVStore().draggableStartDragging({ + type: 'mapping', + data: '{{ $json.something }}', + dimensions: null, + }); + + await userEvent.hover(dropArea); + await fireEvent.mouseUp(dropArea); +} + +describe('DropArea.vue', () => { + afterEach(() => { + vi.clearAllMocks(); + }); + + it('renders default state correctly and emits drop events', async () => { + const pinia = createPinia(); + setActivePinia(pinia); + + const { getByTestId, emitted } = renderComponent({ pinia }); + expect(getByTestId('drop-area')).toBeInTheDocument(); + + await fireDrop(getByTestId('drop-area')); + + expect(emitted('drop')).toEqual([['{{ $json.something }}']]); + }); +}); diff --git a/packages/editor-ui/src/components/ExpressionParameterInput.vue b/packages/editor-ui/src/components/ExpressionParameterInput.vue index be5a0f7ef81be..6616f0d4595c6 100644 --- a/packages/editor-ui/src/components/ExpressionParameterInput.vue +++ b/packages/editor-ui/src/components/ExpressionParameterInput.vue @@ -4,16 +4,22 @@ :class="$style['expression-parameter-input']" @keydown.tab="onBlur" > -
+
- + = +
import type { IUpdateInformation } from '@/Interface'; +import InputTriple from '@/components/InputTriple/InputTriple.vue'; import ParameterInputFull from '@/components/ParameterInputFull.vue'; import ParameterIssues from '@/components/ParameterIssues.vue'; import { useI18n } from '@/composables/useI18n'; +import { resolveParameter } from '@/mixins/workflowHelpers'; import { DateTime } from 'luxon'; import { + FilterError, executeFilterCondition, - type FilterOptionsValue, + validateFieldType, type FilterConditionValue, type FilterOperatorType, + type FilterOptionsValue, type INodeProperties, type NodeParameterValue, - type NodePropertyTypes, - FilterError, - validateFieldType, } from 'n8n-workflow'; import { computed, ref } from 'vue'; import OperatorSelect from './OperatorSelect.vue'; import { OPERATORS_BY_ID, type FilterOperatorId } from './constants'; import type { FilterOperator } from './types'; -import { resolveParameter } from '@/mixins/workflowHelpers'; type ConditionResult = | { status: 'resolve_error' } | { status: 'validation_error'; error: string } @@ -58,15 +58,24 @@ const operatorId = computed(() => { }); const operator = computed(() => OPERATORS_BY_ID[operatorId.value] as FilterOperator); -const operatorTypeToNodePropType = (operatorType: FilterOperatorType): NodePropertyTypes => { +const operatorTypeToNodeProperty = ( + operatorType: FilterOperatorType, +): Pick<INodeProperties, 'type' | 'options'> => { switch (operatorType) { + case 'boolean': + return { + type: 'options', + options: [ + { name: 'true', value: true }, + { name: 'false', value: false }, + ], + }; case 'array': case 'object': - case 'boolean': case 'any': - return 'string'; + return { type: 'string' }; default: - return operatorType; + return { type: operatorType }; } }; @@ -119,7 +128,7 @@ const leftParameter = computed(() => ({ operator.value.type === 'dateTime' ? now.value : i18n.baseText('filter.condition.placeholderLeft'), - type: operatorTypeToNodePropType(operator.value.type), + ...operatorTypeToNodeProperty(operator.value.type), })); const rightParameter = computed(() => ({ @@ -130,7 +139,7 @@ operator.value.type === 'dateTime' ? now.value : i18n.baseText('filter.condition.placeholderRight'), - type: operatorTypeToNodePropType(operator.value.rightType ?? operator.value.type), + ...operatorTypeToNodeProperty(operator.value.type), })); const onLeftValueChange = (update: IUpdateInformation): void => { @@ -144,9 +153,11 @@ const onRightValueChange = (update: IUpdateInformation): void => { const convertToType = (value: unknown, type: FilterOperatorType): unknown => { if (type === 'any') return value; + const fallback = type === 'boolean' ? false : value; + return ( validateFieldType('filter', condition.value.leftValue, type, { parseStrings: true }).newValue ??
- value + fallback ); }; @@ -202,64 +213,53 @@ const onBlur = (): void => { :class="$style.remove" @click="onRemove" > - - + + + + diff --git a/packages/editor-ui/src/components/InputTriple/__tests__/InputTriple.test.ts b/packages/editor-ui/src/components/InputTriple/__tests__/InputTriple.test.ts new file mode 100644 index 0000000000000..a29373ec25583 --- /dev/null +++ b/packages/editor-ui/src/components/InputTriple/__tests__/InputTriple.test.ts @@ -0,0 +1,38 @@ +import { createComponentRenderer } from '@/__tests__/render'; +import InputTriple from '../InputTriple.vue'; + +const renderComponent = createComponentRenderer(InputTriple); + +describe('InputTriple.vue', () => { + afterEach(() => { + vi.clearAllMocks(); + }); + + it('renders layout correctly', async () => { + const { container } = renderComponent({ + props: { middleWidth: '200px' }, + slots: { + left: '
<div>left</div>
', + middle: '
<div>middle</div>
', + right: '
<div>right</div>
', + }, + }); + + expect(container.querySelector('.triple')).toBeInTheDocument(); + expect(container.querySelectorAll('.item')).toHaveLength(3); + expect(container.querySelector('.middle')).toHaveStyle('flex-basis: 200px'); + }); + + it('does not render missing slots', async () => { + const { container } = renderComponent({ + props: { middleWidth: '200px' }, + slots: { + left: '
<div>left</div>
', + middle: '
<div>middle</div>
', + }, + }); + + expect(container.querySelector('.triple')).toBeInTheDocument(); + expect(container.querySelectorAll('.item')).toHaveLength(2); + }); +}); diff --git a/packages/editor-ui/src/components/Node.vue b/packages/editor-ui/src/components/Node.vue index 33ef613e39f41..8d23b7e8b4e89 100644 --- a/packages/editor-ui/src/components/Node.vue +++ b/packages/editor-ui/src/components/Node.vue @@ -106,24 +106,6 @@ />
-
- - -
+
+ + + + +
+
@@ -437,12 +467,10 @@ export default defineComponent({ } return issues; }, - nodeDisabledIcon(): string { - if (this.data.disabled === false) { - return 'pause'; - } else { - return 'play'; - } + nodeDisabledTitle(): string { + return this.data.disabled + ? this.$locale.baseText('node.enable') + : this.$locale.baseText('node.disable'); }, position(): XYPosition { return this.node ? this.node.position : [0, 0]; @@ -680,6 +708,7 @@ export default defineComponent({ }); } }, + executeNode() { this.$emit('runWorkflow', this.data.name, 'Node.executeNode'); this.$telemetry.track('User clicked node hover button', { @@ -689,6 +718,25 @@ export default defineComponent({ }); }, + deleteNode() { + this.$telemetry.track('User clicked node hover button', { + node_type: this.data.type, + button_name: 'delete', + workflow_id: this.workflowsStore.workflowId, + }); + + this.$emit('removeNode', this.data.name); + }, + + toggleDisableNode() { + this.$telemetry.track('User clicked node hover button', { + node_type: this.data.type, + button_name: 'disable', + workflow_id: this.workflowsStore.workflowId, + }); + this.$emit('toggleDisableNode', this.data); + }, + onClick(event: MouseEvent) { void this.callDebounced(this.onClickDebounced, { debounceTime: 50, trailing: true }, event); }, @@ -778,6 +826,42 @@ export default defineComponent({ } } + &.touch-active, + &:hover, + &.menu-open { + .node-options { + opacity: 1; + } + } + + .node-options { + :deep(.button) { + --button-font-color: var(--color-text-light); + --button-border-radius: 0; + } + cursor: default; + position: absolute; + bottom: 100%; + z-index: 11; + min-width: 100%; + display: flex; + left: calc(-1 * var(--spacing-4xs)); + right: calc(-1 * var(--spacing-4xs)); + justify-content: center; + align-items: center; + padding-bottom: var(--spacing-2xs); + font-size: var(--font-size-s); + opacity: 0; + transition: opacity 100ms ease-in; + + &-inner { + display: flex; + align-items: center; + background-color: var(--color-canvas-background); + border-radius: var(--border-radius-base); + } + } + .node-default { position: absolute; width: 100%; @@ -803,15 +887,6 @@ export default defineComponent({ } } - &.touch-active, - &:hover, - &.menu-open { - .node-options { - pointer-events: all; - opacity: 1; - } - } - .node-executing-info { display: none; position: absolute; @@ -868,65 +943,6 @@ export default defineComponent({ .waiting { color: var(--color-secondary); } - - .node-options { - --node-options-height: 26px; - :deep(.button) { - --button-font-color: var(--color-text-light); - } - position: absolute; - display: flex; - align-items: center; - justify-content: space-between; - gap: var(--spacing-2xs); - transition: opacity 100ms ease-in; - opacity: 0; - pointer-events: none; - top: calc(-1 * (var(--node-options-height) + var(--spacing-4xs))); - left: 0; - width: var(--node-width); - height: var(--node-options-height); - font-size: var(--font-size-s); - z-index: 10; - text-align: center; - - .option { - display: inline-block; - - &.touch { - display: none; - } - - &:hover { - color: $color-primary; - } - - .execute-icon { - position: relative; - font-size: var(----font-size-xl); - } - } - - &:after { - content: ''; - display: block; - position: absolute; - left: 0; - right: 0; - top: -1rem; - bottom: -1rem; - z-index: -1; - } - } - - &.is-touch-device .node-options { - left: -25px; - width: 150px; - - .option.touch { - display: initial; - } - } } &--config { @@ -935,20 +951,12 @@ export default defineComponent({ --node-height: 75px; .node-default { - 
.node-options { - background: color-mix(in srgb, var(--color-canvas-background) 80%, transparent); - height: 25px; - } - .node-icon { scale: 0.75; } - } - .node-default { .node-box { border: 2px solid var(--color-foreground-xdark); - //background-color: $node-background-type-other; border-radius: 50px; &.executing { @@ -1027,11 +1035,6 @@ export default defineComponent({ left: var(--configurable-node-icon-offset); } - .node-options { - left: 0; - height: 25px; - } - .node-executing-info { left: -67px; } @@ -1172,10 +1175,6 @@ export default defineComponent({ z-index: 100; } -.node-options { - z-index: 10; -} - .drop-add-node-label { z-index: 10; } diff --git a/packages/editor-ui/src/components/ParameterInput.vue b/packages/editor-ui/src/components/ParameterInput.vue index 54f36b073821e..b22139753d85c 100644 --- a/packages/editor-ui/src/components/ParameterInput.vue +++ b/packages/editor-ui/src/components/ParameterInput.vue @@ -46,7 +46,8 @@ :model-value="expressionDisplayValue" :title="displayTitle" :is-read-only="isReadOnly" - :is-single-line="isSingleLine" + :rows="rows" + :is-assignment="isAssignment" :path="path" :additional-expression-data="additionalExpressionData" :class="{ 'ph-no-capture': shouldRedactValue }" @@ -549,7 +550,11 @@ export default defineComponent({ isReadOnly: { type: Boolean, }, - isSingleLine: { + rows: { + type: Number, + default: 5, + }, + isAssignment: { type: Boolean, }, parameter: { @@ -1314,7 +1319,11 @@ export default defineComponent({ (!this.modelValue || this.modelValue === '[Object: null]') ) { this.valueChanged('={{ 0 }}'); - } else if (this.parameter.type === 'number' || this.parameter.type === 'boolean') { + } else if ( + this.parameter.type === 'number' || + this.parameter.type === 'boolean' || + typeof this.modelValue !== 'string' + ) { this.valueChanged(`={{ ${this.modelValue} }}`); } else { this.valueChanged(`=${this.modelValue}`); @@ -1345,7 +1354,6 @@ export default defineComponent({ // Strip the '=' from the beginning newValue = this.modelValue ? 
this.modelValue.toString().substring(1) : null; } - this.valueChanged(newValue); } } else if (command === 'refreshOptions') { @@ -1416,6 +1424,7 @@ export default defineComponent({ .droppable { --input-border-color: var(--color-ndv-droppable-parameter); + --input-border-right-color: var(--color-ndv-droppable-parameter); --input-border-style: dashed; textarea, @@ -1427,6 +1436,7 @@ export default defineComponent({ .activeDrop { --input-border-color: var(--color-success); + --input-border-right-color: var(--color-success); --input-background-color: var(--color-foreground-xlight); --input-border-style: solid; diff --git a/packages/editor-ui/src/components/ParameterInputFull.vue b/packages/editor-ui/src/components/ParameterInputFull.vue index d3a892e069859..370d130eee87f 100644 --- a/packages/editor-ui/src/components/ParameterInputFull.vue +++ b/packages/editor-ui/src/components/ParameterInputFull.vue @@ -49,7 +49,8 @@ :model-value="value" :path="path" :is-read-only="isReadOnly" - :is-single-line="isSingleLine" + :is-assignment="isAssignment" + :rows="rows" :droppable="droppable" :active-drop="activeDrop" :force-show-expression="forceShowExpression" @@ -140,7 +141,11 @@ export default defineComponent({ type: Boolean, default: false, }, - isSingleLine: { + rows: { + type: Number, + default: 5, + }, + isAssignment: { type: Boolean, default: false, }, @@ -387,6 +392,7 @@ export default defineComponent({ position: absolute; bottom: -22px; right: 0; + z-index: 1; opacity: 0; transition: opacity 100ms ease-in; diff --git a/packages/editor-ui/src/components/ParameterInputList.vue b/packages/editor-ui/src/components/ParameterInputList.vue index 3ecd0cd35cb36..0822aca9e218a 100644 --- a/packages/editor-ui/src/components/ParameterInputList.vue +++ b/packages/editor-ui/src/components/ParameterInputList.vue @@ -65,26 +65,38 @@ :underline="true" color="text-dark" /> - - - + + + + + + {{ $locale.baseText('parameterInputList.loadingError') }} + +
{ + if ( + !['FixedCollectionParameter', 'CollectionParameter'].includes( + component?.$options.name as string, + ) + ) { + return; + } + asyncLoadingError.value = true; + console.error(e); + window?.Sentry?.captureException(e, { + tags: { + asyncLoadingError: true, + }, + }); + // Don't propagate the error further + return false; + }); return { nodeHelpers, + asyncLoadingError, }; }, computed: { @@ -572,5 +617,10 @@ export default defineComponent({ font-weight: var(--font-weight-bold); } } + + .async-notice { + display: block; + padding: var(--spacing-3xs) 0; + } } diff --git a/packages/editor-ui/src/components/ParameterInputWrapper.vue b/packages/editor-ui/src/components/ParameterInputWrapper.vue index a2e98626a647e..b446bfe2e4145 100644 --- a/packages/editor-ui/src/components/ParameterInputWrapper.vue +++ b/packages/editor-ui/src/components/ParameterInputWrapper.vue @@ -7,6 +7,7 @@ :model-value="modelValue" :path="path" :is-read-only="isReadOnly" + :is-assignment="isAssignment" :droppable="droppable" :active-drop="activeDrop" :force-show-expression="forceShowExpression" @@ -15,10 +16,10 @@ :error-highlight="errorHighlight" :is-for-credential="isForCredential" :event-source="eventSource" - :expression-evaluated="expressionValueComputed" + :expression-evaluated="evaluatedExpressionValue" :additional-expression-data="resolvedAdditionalExpressionData" :label="label" - :is-single-line="isSingleLine" + :rows="rows" :data-test-id="`parameter-input-${parsedParameterName}`" :event-bus="eventBus" @focus="onFocus" @@ -45,28 +46,29 @@