From db055bb7aeed97a9547dc78f03165195abdab96f Mon Sep 17 00:00:00 2001 From: Denis Shelest Date: Tue, 1 Oct 2024 11:49:31 -0400 Subject: [PATCH] feat: resultProcessor (after rebase + resolve conflicts) (#2) (#117) feat: add resultProcessor --- .eslintrc.js | 26 +++--- index.js | 51 +++++++++--- package.json | 3 - readme.md | 102 ++++++++++++++---------- test/acceptance_test.js | 119 +++++++++++++++------------- test/config/mockProcess.testrail.js | 33 ++++++++ test/scenario.js | 6 +- 7 files changed, 214 insertions(+), 126 deletions(-) create mode 100644 test/config/mockProcess.testrail.js diff --git a/.eslintrc.js b/.eslintrc.js index 30410d9..2697f33 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -1,17 +1,17 @@ module.exports = { - 'env': { - 'browser': true, - 'es6': true, - 'node': true, - 'mocha': true + env: { + browser: true, + es6: true, + node: true, + mocha: true }, - 'extends': ['eslint:recommended', 'plugin:codeceptjs/recommended'], - 'parserOptions': { - 'sourceType': 'module', - 'ecmaVersion': 2020 + extends: ['eslint:recommended', 'plugin:codeceptjs/recommended'], + parserOptions: { + sourceType: 'module', + ecmaVersion: 2020 }, - 'rules': { - 'indent': [ + rules: { + indent: [ 'error', 'tab' ], @@ -19,11 +19,11 @@ module.exports = { 'error', 'unix' ], - 'quotes': [ + quotes: [ 'error', 'single' ], - 'semi': [ + semi: [ 'error', 'always' ], diff --git a/index.js b/index.js index b7b3549..f07ff3a 100644 --- a/index.js +++ b/index.js @@ -22,8 +22,10 @@ const defaultConfig = { passed: { status_id: 1 }, failed: { status_id: 5 }, }, + runId: undefined, closeTestRun: true, - version: '1' // this is the build version - OPTIONAL + version: '1', // this is the build version - OPTIONAL, + resultProcessor: undefined }; let helper; @@ -38,8 +40,13 @@ module.exports = (config) => { config = deepMerge (defaultConfig, config); output.showDebugLog(config.debugLog); - if (config.host === '' || config.user === '' || config.password === '') throw new Error('Please provide proper Testrail host or credentials'); + if (!config.host) throw new Error('Please provide proper Testrail host'); + if (!config.user) throw new Error('Please provide proper Testrail user'); + if (!config.password) throw new Error('Please provide proper Testrail password'); if (!config.projectId) throw new Error('Please provide project id in config file'); + if (config.resultProcessor && typeof config.resultProcessor !== 'function') { + throw new Error('Result processor (`resultProcessor` config option) has to be function'); + } const testrail = new TestRail(config); @@ -202,7 +209,7 @@ module.exports = (config) => { let config_ids = []; mergedTests.forEach(test => { - for (let [key, value] of Object.entries(test)) { + for (const [key, value] of Object.entries(test)) { if (key === 'case_id') { ids.push(value); } @@ -334,20 +341,40 @@ module.exports = (config) => { const allResults = passedTests.concat(failedTests.concat(skippedTest)); - // Before POST-ing the results, filter the array for any non-existing tags in TR test bucket assigned to this test run - // This is to avoid any failure to POST results due to labels in the results array not part of the test run - let validResults = []; - testrail.getCases(config.projectId, config.suiteId).then(res => { - if (res.length) { - validResults = allResults.filter(result => res.find(tag => tag.id == result.case_id)); - const missingLabels = allResults.filter(result => !validResults.find(vResult => vResult.case_id == result.case_id)); + testrail.getCases(config.projectId, 
config.suiteId).then(testCases => { + if (testCases.length) { + // Before POST-ing the results, filter the array for any non-existing tags in TR test bucket assigned to this test run + // This is to avoid any failure to POST results due to labels in the results array not part of the test run + const { validResults, missingLabels } = allResults.reduce( + (acc, testResult) => { + const testCase = testCases.find(it => it.id == testResult.case_id); + // If there is `resultProcessor` callback in config, then we need to process test result + const processedResult = config.resultProcessor + ? config.resultProcessor(testResult, { testCase, allResults, allTestCases: testCases }) + : testResult; + + if (processedResult) { + if (testCase) { + acc.validResults.push(processedResult); + } else { + acc.missingLabels.push(processedResult); + } + } + return acc; + }, + { validResults: [], missingLabels: [] } + ); + if (missingLabels.length) { output.error(`Error: some labels are missing from the test run and the results were not send through: ${JSON.stringify(missingLabels.map(l => l.case_id))}`); } + + return { validResults }; } - }).then(() => { + return { validResults: [] }; + }).then(({ validResults }) => { if (validResults.length) { - testrail.addResultsForCases(runId, { results: validResults }).then(res => { + testrail.addResultsForCases(runId, {results: validResults}).then(res => { output.log(`The run ${runId} is updated with ${JSON.stringify(res)}`); for (const test of failedTests) { diff --git a/package.json b/package.json index 9de1221..04fa46d 100644 --- a/package.json +++ b/package.json @@ -39,10 +39,7 @@ "chalk": "4.0.0", "codeceptjs": "3.5.8", "eslint": "8.2.0", - "eslint-config-airbnb-base": "15.0.0", "eslint-plugin-codeceptjs": "^1.3.0", - "eslint-plugin-import": "2.29.0", - "expect": "26.0.0", "json-server": "0.17.0", "mocha": "10.2.0", "semantic-release": "21.0.1" diff --git a/readme.md b/readme.md index 13c4fa5..da105c2 100644 --- a/readme.md +++ b/readme.md @@ -8,7 +8,7 @@ [![Tests](https://github.com/kobenguyent/codeceptjs-testrail/actions/workflows/run-tests.yml/badge.svg)](https://github.com/kobenguyent/codeceptjs-testrail/actions/workflows/run-tests.yml) -##### Introduction +## Introduction Testrail CodeceptJS Integration. The test run is created automatically after the test execution. The screenshots of failed tests are also attached to test results. @@ -17,9 +17,9 @@ Testrail CodeceptJS Integration. The test run is created automatically after the New feature, add the configuration to test run of test plan ![Attachemnt for failed case](http://g.recordit.co/uQLvQUq7cT.gif) -##### Requirement +## Requirement -To use this custom plugin +Install to use this custom plugin ```sh npm i codeceptjs-testrail --save @@ -29,29 +29,29 @@ npm i codeceptjs-testrail --save - You should provide the test case id to make it works, otherwise, this plugin has no clue which case id to be added to test run on Testrail. -```sh -npx codeceptjs run-workers 3 -``` - -An example: +**An example:** ```js -... +// ... Scenario('Search function is displayed @C12345', ({I, homePage}) => { I.seeElement(homePage.searchTextbox); I.seeElement(homePage.searchButton); }); -... +// ... ``` -**Data driven tests** +```sh +npx codeceptjs run-workers 3 +``` + +#### Data driven tests If you want to have different Data-driven test cases with different IDs in Testrail for each iteration of the test you will need to populate the Data object with your a tag. 
This works because CodeceptJS extracts tags from test names, and data for Data-driven tests is populated in the test name. -An example: +**An example:** ```js -... +// ... let accounts = new DataTable(['testRailTag', 'user', 'password']); accounts.add(['@C12345', 'davert', '123456']); // add a tag for each user along with their test data accounts.add(['@C45678', 'admin', '123456']); @@ -62,10 +62,10 @@ An example: I.click('Sign In'); I.see('Welcome '+ current.login); }); -... +// ... ``` -A Gherkin example: +**A Gherkin example:** ```gherkin @smoke @@ -77,6 +77,7 @@ A Gherkin example: ``` **Note:** + TestRail tag in **Examples** from **Scenario Outline** available from version `1.7.4` and above ```gherkin @@ -91,13 +92,14 @@ TestRail tag in **Examples** from **Scenario Outline** available from version `1 | @C1235 | someText2 | ``` -##### Configuration +## Configuration Add this plugin to config file: ```js -... +// ... plugins: { + // ... testrail: { require: 'codeceptjs-testrail', host: 'https://kobenguyent.testrail.io', @@ -117,35 +119,53 @@ plugins: { configName: 'leopard' }, testCase: { - passed: { status_id: 1, comment: 'This is passed on build 123' }, - failed: { status_id: 5, comment: 'This is failed on build 123' }, - } + passed: { status_id: 1, comment: 'This is passed on build 123' }, + failed: { status_id: 5, comment: 'This is failed on build 123' }, + }, enabled: true, closeTestRun: true, skipInfo: { - message: "Skipped due to failure in 'before' hook" + message: "Skipped due to failure in 'before' hook" + }, + version: '1', + resultProcessor: (testResult, { testCase, allResults, allTestCases }) => { + if (testResult.status_id == 7) { + // do not publish + return null; + } + const additionFields = {}; + if (testResult.status_id == 1) { + additionFields.custom_testrail_field_1 = 'OK'; + } + return { ...testResult, ...additionFields, custom_testrail_field_222: 2 }; } } + // ... } -... +// ... ``` -**Possible config options:** - - -| config name | required | Description | -| ----------------------- | ---------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| suiteId | yes | when your project is not under the single-suite mode,`suiteId` is needed. When you don't provide the `suiteId`, the first `suiteId` will be used as default. | -| projectId | yes | The project Id which is from the Testrail. This should be provided to make this plugin works | -| prefixTag | no | by default it is set to`'@C'` | -| runName | no | your desired test run name. If you done provide this test run name, default test run name is as`This is a new test run on ${dd/mm/yyy H:M}` which is current day. | -| runId | no | provide the existing run Id when you want to update the existing one instead of creating new testrun. | -| plan - existingPlanId | no | if you provide an existing plan ID, the new test run is added to that test plan. Otherwise, new test plan is created and new test run is added to that test plan. | -| plan - name | no | your desired plan name. | -| plan - description | no | your desired description to your test plan. | -| plan - onlyCaseIds | no | if`true` it will consider only test cases that actually run while posting results to testrail | -| testCase | no | if you configured testrail to use custom test case statuses, you can override default status_id with yours, or your custom comment. 
| -| configuration | no | provide the created configuration group name - configuration name that you want to add to the test run. If you don't provide anything or wrong either group name or config name, there will be no configuration added to test run. | -| debugLog | no | show more logs for debugging purposes. | -| closeTestRun | no | if you wish to close the test run afterwards,by default test run is not closed afterwards. | -| skipInfo - message | no | Message to comment for skipped cases | +### Possible config options: + +| config name | required | Description | +|-----------------------| -------- |-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| suiteId | yes | When your project is not under the single-suite mode,`suiteId` is needed. When you don't provide the `suiteId`, the first `suiteId` will be used as default. | +| projectId | yes | The project Id which is from the Testrail. This should be provided to make this plugin works | +| prefixTag | no | By default it is set to`'@C'` | +| runName | no | Your desired test run name. If you done provide this test run name, default test run name is as`This is a new test run on ${dd/mm/yyy H:M}` which is current day. | +| runId | no | Provide the existing run Id when you want to update the existing one instead of creating new testrun. | +| plan - existingPlanId | no | If you provide an existing plan ID, the new test run is added to that test plan. Otherwise, new test plan is created and new test run is added to that test plan. | +| plan - name | no | Your desired plan name. | +| plan - description | no | Your desired description to your test plan. | +| plan - onlyCaseIds | no | If `true` it will consider only test cases that actually run while posting results to testrail | +| testCase | no | If you configured testrail to use custom test case statuses, you can override default status_id with yours, or your custom comment. | +| configuration | no | Provide the created configuration group name - configuration name that you want to add to the test run. If you don't provide anything or wrong either group name or config name, there will be no configuration added to test run. | +| debugLog | no | Show more logs for debugging purposes. | +| closeTestRun | no | If you wish to close the test run afterwards,by default test run is not closed afterwards. | +| skipInfo - message | no | Message to comment for skipped cases | +| version | no | Build version. E.g. `version: packageJson.version` | +| resultProcessor | no | `function(testResult, { testCase, allResults, allTestCases })`.
It is expected to return a test result object (you can add or replace any required [custom fields](https://support.testrail.com/hc/en-us/articles/7373850291220-Configuring-custom-fields) in this object to match your [Testrail configuration](https://support.testrail.com/hc/en-us/articles/7077871398036-Result-Fields)).<br>
If this function returns `null` then this result will not be sent to Testrail. | + +## Contributing + +[Read here](./.github/CONTRIBUTING.md) \ No newline at end of file diff --git a/test/acceptance_test.js b/test/acceptance_test.js index 97ee9f1..1b1898d 100644 --- a/test/acceptance_test.js +++ b/test/acceptance_test.js @@ -1,83 +1,78 @@ const { exec } = require('child_process'); const { expect } = require('chai'); -const runner = './node_modules/.bin/codeceptjs run'; -const mockTestrailConfig = './test/config/mock.testrail.js'; +const path = require('path'); + const testrailPlugin = require('../index.js'); +const runner = `${path.resolve('./node_modules/.bin/codeceptjs')} run`; +const mockTestrailConfigs = { + general: './test/config/mock.testrail.js', + processor: './test/config/mockProcess.testrail.js' +}; + + describe('Incomplete info', () => { + const mainConfig = { + require: '../../index.js', + host: 'https://peterngtr1.testrail.io', + user: 'user', + password: 'pass', + suiteId: 1, + projectId: '99', + runName: 'Custom run name', + enabled: true + }; + describe('Missing host', () => { - it('should return error', () => { - try { - testrailPlugin(); - } catch (e) { - expect(e.message).contain('Please provide proper Testrail host or credentials'); - } + it('should throw error', () => { + expect(() => testrailPlugin()) + .to.throw('Please provide proper Testrail host'); }); }); describe('Missing user', () => { - it('should rerun error', () => { - try { - testrailPlugin({ - require: '../../index.js', - host: 'https://peterngtr1.testrail.io', - user: '', - password: 'pass', - suiteId: 1, - projectId: 1, - runName: 'Custom run name', - enabled: true - }); - } catch (e) { - expect(e.message).contain('Please provide proper Testrail host or credentials'); - } + it('should throw error', () => { + expect(() => testrailPlugin({ ...mainConfig, user: '' })) + .to.throw('Please provide proper Testrail user'); }); }); describe('Missing password', () => { - it('should rerun error', () => { - try { - testrailPlugin({ - require: '../../index.js', - host: 'https://peterngtr1.testrail.io', - user: 'user', - password: '', - suiteId: 1, - projectId: 1, - runName: 'Custom run name', - enabled: true - }); - } catch (e) { - expect(e.message).contain('Please provide proper Testrail host or credentials'); - } + it('should throw error', () => { + expect(() => testrailPlugin({ ...mainConfig, password: '' })) + .to.throw('Please provide proper Testrail password'); }); }); describe('Missing project id', () => { - it('should rerun error', () => { - try { - testrailPlugin({ - require: '../../index.js', - host: 'https://peterngtr1.testrail.io', - user: 'user', - password: 'pass', - suiteId: 1, - projectId: '', - runName: 'Custom run name', - enabled: true - }); - } catch (e) { - expect(e.message).contain('Please provide project id in config file'); - } + it('should throw error', () => { + expect(() => testrailPlugin({ ...mainConfig, projectId: '' })) + .to.throw('Please provide project id in config file'); }); }); + describe('Wrong resultProcessor value', () => { + it('should throw error for Number', () => { + expect(() => testrailPlugin({ ...mainConfig, resultProcessor: 1 })) + .to.throw('Result processor (`resultProcessor` config option) has to be function'); + }); + + it('should throw error for String', () => { + expect(() => testrailPlugin({ ...mainConfig, resultProcessor: 'string value' })) + .to.throw('Result processor (`resultProcessor` config option) has to be function'); + }); + + it('should throw error for 
Object', () => { + expect(() => testrailPlugin({ ...mainConfig, resultProcessor: {} })) + .to.throw('Result processor (`resultProcessor` config option) has to be function'); + }); + }); }); describe('Valid config file', () => { describe('Add run and test result', () => { it('should update the results on passed case', (done) => { - exec(`${runner} --grep @pass -c ${mockTestrailConfig}`, (err, stdout) => { + exec(`${runner} --grep "@pass" -c "${mockTestrailConfigs.general}"`, (err, stdout) => { expect(stdout).to.include('addRun: SUCCESS - the request data is {"suite_id":1,"name":"Custom run name","include_all":false}'); expect(stdout).to.include('addRun: SUCCESS - the response data is {"suite_id":1,"name":"Custom run name","include_all":false,"id":1}'); done(); @@ -85,12 +80,26 @@ describe('Valid config file', () => { }); it('should update the results on failed case', (done) => { - exec(`${runner} --grep @fail -c ${mockTestrailConfig}`, (err, stdout) => { + exec(`${runner} --grep "@fail" -c "${mockTestrailConfigs.general}"`, (err, stdout) => { expect(stdout).to.include('FAIL | 0 passed, 1 failed'); expect(stdout).to.include('addRun: SUCCESS - the request data is {"suite_id":1,"name":"Custom run name","include_all":false}'); expect(stdout).to.include('addRun: SUCCESS - the response data is {"suite_id":1,"name":"Custom run name","include_all":false,"id":2}'); done(); }); }); + + it('should call resultProcessor for passed case', (done) => { + exec(`${runner} --grep "@pass" -c "${mockTestrailConfigs.processor}"`, (err, stdout) => { + expect(stdout).to.include('addResultsForCases: SUCCESS - the request data is {"results":[{"case_id":"1","elapsed":"1s","comment":"FAIL COMMENT","status_id":5,"version":"1","my_test_custom_field":777}]}'); + done(); + }); + }); + + it('should call resultProcessor for failed case', (done) => { + exec(`${runner} --grep "@fail" -c "${mockTestrailConfigs.processor}"`, (err, stdout) => { + expect(stdout).to.include('addResultsForCases: SUCCESS - the request data is {"results":[{"case_id":"2","elapsed":"1s","comment":"FAIL COMMENT","status_id":5,"version":"1","my_test_custom_field":777}]}'); + done(); + }); + }); }); }); diff --git a/test/config/mockProcess.testrail.js b/test/config/mockProcess.testrail.js new file mode 100644 index 0000000..da8697f --- /dev/null +++ b/test/config/mockProcess.testrail.js @@ -0,0 +1,33 @@ +exports.config = { + tests: '../scenario.js', + output: '../output', + helpers: { + REST: { + endpoint: 'https://reqres.in', + } + }, + include: {}, + bootstrap: null, + mocha: {}, + name: 'codeceptjs-rest-demo', + plugins: { + testrail: { + require: '../../index.js', + enabled: true, + host: 'http://localhost:3000', + user: 'test', + password: 'pass', + suiteId: 1, + projectId: 1, + runId: 1, + runName: 'Custom run name', + debugLog: true, + closeTestRun: false, + testCase: { + passed: { comment: 'PASS COMMENT' }, + failed: { comment: 'FAIL COMMENT' }, + }, + resultProcessor: (result) => ({ ...result, my_test_custom_field: 777 }) + } + } +}; diff --git a/test/scenario.js b/test/scenario.js index a62a0d2..9b33680 100644 --- a/test/scenario.js +++ b/test/scenario.js @@ -1,10 +1,12 @@ const { expect } = require('chai'); const { faker } = require('@faker-js/faker'); -let userData; -const {I} = inject(); + +const { I } = inject(); Feature('PUT tests'); +let userData; + Before(() => { userData = { name: faker.internet.userName(),