From c161f87f25cca19bc1a80492e42e55d6856cc7c2 Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Sat, 6 Feb 2021 14:48:29 +0200 Subject: [PATCH 01/50] Correct std output log name --- serve.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/serve.js b/serve.js index c6bc1fe..6e6267f 100644 --- a/serve.js +++ b/serve.js @@ -203,7 +203,7 @@ srv.get(`/${ENDPOINT}/:id`, function (req, res) { ); fetchCommit(req.params.id, !isSHA, req.query.module) .then(id => { - let filename = log_only? `test_output.log` : `std_output-${lib.shortID(req.params.id)}.log`; + let filename = log_only? `test_output.log` : `std_output-${lib.shortID(id)}.log`; let logFile = path.join(config.dataPath, 'reports', id, filename); fs.readFile(logFile, 'utf8', (err, data) => { if (err) { From 7a9821a43efe7a8d8a99e9a8f21e2f01dbfd281a Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Sat, 6 Feb 2021 15:41:34 +0200 Subject: [PATCH 02/50] Added tests badge option --- lib.js | 25 ++++++++++++++++++++++--- serve.js | 2 +- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/lib.js b/lib.js index 1f7b5fc..a5b245a 100644 --- a/lib.js +++ b/lib.js @@ -397,14 +397,14 @@ function compareCoverage(job) { * Get the coverage results and build status data for the shields.io coverage badge API. * If test results don't exist, a new job is added to the queue and the message is set to 'pending' * @param {Object} data - An object with the keys 'sha', 'repo', 'owner' and 'context'. - * 'context' must be 'coverage' or 'status'. + * 'context' must be 'coverage', 'build', or 'tests'. */ function getBadgeData(data) { let id = data.sha; if (!id) { throw new ReferenceError('Invalid "sha" field in input data') } - var report = {'schemaVersion': 1, 'label': data.context === 'status'? 'build' : 'coverage'}; + var report = {'schemaVersion': 1, 'label': data.context}; // Try to load coverage record let record = data.force? [] : loadTestRecords(id); // If no record found @@ -421,7 +421,7 @@ function getBadgeData(data) { } else { record = Array.isArray(record) ? record.pop() : record; // in case of duplicates, take last switch (data.context) { - case 'status': + case 'build': if (record['status'] === 'error') { report['message'] = 'unknown'; report['color'] = 'orange'; @@ -429,6 +429,25 @@ function getBadgeData(data) { report['message'] = (record['status'] === 'success' ? 'passing' : 'failing'); report['color'] = (record['status'] === 'success' ? 'brightgreen' : 'red'); } + break; + case 'tests': + if (record['status'] === 'error') { + report['message'] = 'unknown'; + report['color'] = 'orange'; + } else { + if (record['statistics']) { + let pass = record['statistics']['passed']; + let fail = record['statistics']['failed'] + record['statistics']['errored']; + let skip = record['statistics']['skipped']; + report['message'] = `${pass} passed`; + if (fail > 0) { report['message'] += `, ${fail} failed`; } + if (skip > 0) { report['message'] += `, ${skip} skipped`; } + } else { + report['message'] = (record['status'] === 'success' ? 'passed' : 'failed') + } + report['color'] = (record['status'] === 'success' ? 
'brightgreen' : 'red'); + } + break; case 'coverage': if (record['status'] === 'error' || !record['coverage']) { diff --git a/serve.js b/serve.js index 6e6267f..37aa528 100644 --- a/serve.js +++ b/serve.js @@ -242,7 +242,7 @@ srv.get('/:badge/:repo/:branch', async (req, res) => { // Find head commit of branch return request('GET /repos/:owner/:repo/git/refs/heads/:branch', data) .then(response => { - data['context'] = req.params.badge; + data['context'] = req.params.badge === 'status' ? 'build' : req.params.badge; data['sha'] = response.data.object.sha; data['force'] = req.query.force === '' || lib.strToBool(req.query.force); console.log(`Request for ${data.branch} ${data.context}`) From 4fb7f3dc967303304fc17bf718da4daad0cbea3b Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Sat, 6 Feb 2021 18:45:55 +0200 Subject: [PATCH 03/50] Bug fix for GitHub-X-Event rejection --- CHANGELOG.md | 9 ++++++++- README.md | 2 +- main.js | 5 ----- serve.js | 2 +- test/serve.test.js | 23 ++++++++++++++++++++--- 5 files changed, 30 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index af4a9b5..79ec0a7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog -## [Latest](https://github.com/cortex-lab/matlab-ci/commits/master) [2.2.0] +## [Latest](https://github.com/cortex-lab/matlab-ci/commits/master) [2.2.1] + +## Modified + + - fix error where github even incorrectly rejected + - fix bug incorrect log name when endpoint called with branch name + +## [2.2.0] ## Added - nyc dependency for manual coverage of matlab-ci diff --git a/README.md b/README.md index 19025f2..1d07a15 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # MATLAB-ci [![Build Status](https://travis-ci.com/cortex-lab/matlab-ci.svg?branch=master)](https://travis-ci.com/cortex-lab/matlab-ci) -[![Coverage](https://img.shields.io/badge/coverage-81.07-green)](https://img.shields.io/badge/coverage-72.35-yellowgreen) +[![Coverage](https://img.shields.io/badge/coverage-81.58-green)](https://img.shields.io/badge/coverage-72.35-yellowgreen) A small set of modules written in Node.js for running automated tests of MATLAB code in response to GitHub events. Also submits code coverage to the Coveralls API. 
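For reference, the 'tests' badge case added to getBadgeData in PATCH 02 above returns a shields.io endpoint payload of the following shape. This sketch is illustrative only — the values are invented, but the field names mirror the report object assembled in that patch:

```js
// Illustrative getBadgeData output for context 'tests' (invented values;
// field names follow the report object built in PATCH 02 above).
const exampleReport = {
  schemaVersion: 1,
  label: 'tests',                   // label is now the raw context string
  message: '61 passed, 2 failed',   // assembled from record.statistics counts
  color: 'red'                      // 'brightgreen' when record.status === 'success'
};
```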
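The "GitHub-X-Event rejection" fix in PATCH 03 below swaps the `in` operator for `Array.prototype.includes` when filtering incoming webhook events. A minimal sketch of the pitfall, assuming `supportedEvents` is an array of event names (as the corrected code implies):

```js
// Assumed contents for illustration only.
const supportedEvents = ['push', 'pull_request'];

// `in` tests object keys; for an array those are its numeric indices,
// so a perfectly valid event name is rejected:
console.log('push' in supportedEvents);           // false
console.log(0 in supportedEvents);                // true (index 0 exists)

// `includes` tests membership by value, which is what the handler needs:
console.log(supportedEvents.includes('push'));    // true
```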
diff --git a/main.js b/main.js index 15fa8ba..4535440 100644 --- a/main.js +++ b/main.js @@ -32,11 +32,6 @@ handler.on('*', evt => eventCallback(evt)); queue.on('error', _ => {}); -// Log handler errors -handler.on('error', function (err) { - console.error('Error:', err.message); -}) - // Log any unhandled errors process.on('unhandledRejection', (reason, p) => { console.log('Unhandled Rejection at: Promise', p, 'reason:', reason); diff --git a/serve.js b/serve.js index 6e6267f..32c8351 100644 --- a/serve.js +++ b/serve.js @@ -105,7 +105,7 @@ srv.post('/github', async (req, res, next) => { console.log('Post received') let id = req.header('x-github-hook-installation-target-id'); if (id != process.env.GITHUB_APP_IDENTIFIER) { next(); return; } // Not for us; move on - if (req.header('X-GitHub-Event') in supportedEvents) { + if (supportedEvents.includes(req.header('X-GitHub-Event'))) { await setAccessToken(); handler(req, res, () => res.end('ok')); } else { diff --git a/test/serve.test.js b/test/serve.test.js index c26c21d..7a16da7 100644 --- a/test/serve.test.js +++ b/test/serve.test.js @@ -253,11 +253,11 @@ describe("Github event handler callback", () => { }, }); await setAccessToken(); - scope.done(); + nock.cleanAll() } - before(function (done) { - setToken().then(() => done()); + before(async function () { + await setToken(); scope = nock('https://api.github.com', { reqheaders: { accept: 'application/vnd.github.machine-man-preview+json', @@ -839,6 +839,23 @@ describe('srv github/', () => { }); }); + it('expect token set', (done) => { + // Although the blob signature won't match, we can at least test that setAccessToken was called + request(srv) + .post(`/github`) // trailing slash essential + .set({ + 'X-GitHub-Event': 'push', + 'x-github-hook-installation-target-id': process.env.GITHUB_APP_IDENTIFIER, + 'X-Hub-Signature': {'sha': SHA}, + 'X-GitHub-Delivery': '72d3162e-cc78-11e3-81ab-4c9367dc0958' + }) + .end(function (err) { + expect(scope.pendingMocks().length).lt(2); // setAccessToken was called + err ? done(err) : done(); + }); + }); + + after(function () { clock.restore(); }); From a95c0f016cf588b1e2388da4c14fd33e2180eb4d Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Sat, 6 Feb 2021 20:18:54 +0200 Subject: [PATCH 04/50] #53 --- test/serve.test.js | 244 ++++++++++++++++++++++++--------------------- 1 file changed, 132 insertions(+), 112 deletions(-) diff --git a/test/serve.test.js b/test/serve.test.js index 7a16da7..7049510 100644 --- a/test/serve.test.js +++ b/test/serve.test.js @@ -10,7 +10,6 @@ const assert = require('chai').assert; const appAuth = require("@octokit/auth-app"); const APIError = require('../lib').APIError; -const lib = require('../lib'); const { updateStatus, setAccessToken, eventCallback, srv, prepareEnv, runTests, fetchCommit} = require('../serve'); const queue = require('../lib').queue; @@ -21,6 +20,50 @@ const APP_ID = process.env.GITHUB_APP_IDENTIFIER; const ENDPOINT = 'logs'; // The URL endpoint for fetching status check details const SHA = 'cabe27e5c8b8cb7cdc4e152f1cf013a89adc7a71' +/** + * This fixture ensures the `token` variable is not null. + * Must be called before any other nock fixtures or else they will be reset. 
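 *
 * @example
 * // Assumed mocha-style usage, mirroring the suites later in this file:
 * // authenticate first, then register any other nock scopes (setToken ends
 * // with nock.cleanAll(), so scopes defined earlier would be wiped).
 * before(async function () {
 *   await setToken();
 *   scope = nock('https://api.github.com');
 * });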
+ */ +async function setToken() { + const scope = nock('https://api.github.com'); + scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) + .reply(201, {id: APP_ID}); + scope.post(`/app/installations/${APP_ID}/access_tokens`) + .reply(201, { + token: '#t0k3N', + permissions: { + checks: "write", + metadata: "read", + contents: "read" + }, + }); + await setAccessToken(); + nock.cleanAll() +} + +/** +* This fixture injects the default null token via setAccessToken. +*/ +async function resetToken() { + const token_default = {'tokenType': null}; + const sandbox = sinon.createSandbox({ + useFakeTimers: { + now: new Date(3000, 1, 1, 0, 0) + }}) + sandbox.stub(appAuth, 'createAppAuth').returns(async () => token_default); + try { await setAccessToken(); } catch (_) {} + sandbox.restore(); +} + +/** +* This fixture injects the default null token via setAccessToken. +*/ +async function mockToken(sandbox) { + await setToken(); // Ensure App id set + const token = {token: '#t0k3N'}; + return (sandbox || sinon).stub(appAuth, 'createAppAuth').returns(async () => token); +} + /** * This tests 'setAccessToken' which handles the app authentication. @@ -30,22 +73,7 @@ describe('setAccessToken', () => { var clock; // Our clock mock for replicable JWT const expiry = new Date(); // Date of token expiry - /** - * This fixture injects the default null token via setAccessToken. - */ - async function resetToken() { - const token_default = {'tokenType': null}; - const sandbox = sinon.createSandbox({ - useFakeTimers: { - now: new Date(3000, 1, 1, 0, 0) - }}) - sandbox.stub(appAuth, 'createAppAuth').returns(async () => token_default); - try { await setAccessToken(); } catch (_) {} - sandbox.restore(); - } - before(async function () { - await resetToken(); expiry.setTime(expiry.getTime() + 60e3); // 60s in the future // https://runkit.com/gr2m/reproducable-jwt clock = sinon.useFakeTimers({ @@ -54,8 +82,8 @@ describe('setAccessToken', () => { }); }); - beforeEach(function() { - // Mock for App.installationAccessToken + beforeEach(async function() { + await resetToken(); scope = nock('https://api.github.com', { reqheaders: { accept: 'application/vnd.github.machine-man-preview+json', @@ -79,7 +107,7 @@ describe('setAccessToken', () => { }); setAccessToken().then(function () { - scope.isDone(); + scope.done(); done(); }); }); @@ -91,7 +119,7 @@ describe('setAccessToken', () => { .matchHeader('authorization', `bearer ${token}`) .reply(201, {id: APP_ID}) scope.post(`/app/installations/${APP_ID}/access_tokens`) - .twice() // Should be called twice in a row + .once() // Should be called once .matchHeader('authorization', `bearer ${token}`) .reply(201, { token: '#t0k3N', @@ -105,7 +133,7 @@ describe('setAccessToken', () => { setAccessToken().then(async function () { await setAccessToken(); - scope.isDone(); + scope.done(); done(); }); }); @@ -129,11 +157,15 @@ describe('setAccessToken', () => { setAccessToken().then(async function () { await setAccessToken(); - scope.isDone(); + scope.done(); done(); }); }); + afterEach(() => { + nock.cleanAll(); + }); + after(async function() { clock.restore(); await resetToken(); @@ -149,15 +181,14 @@ describe("updateStatus", () => { var spy; // A spy for authentication var data; // Some job data to update the status with - beforeEach(function() { + beforeEach(async function() { // Mock for App.installationAccessToken scope = nock('https://api.github.com', { reqheaders: { accept: 'application/vnd.github.machine-man-preview+json', } }); - const token = {token: 
'#t0k3N'}; - spy = sinon.stub(appAuth, 'createAppAuth').returns(async () => token); + spy = await mockToken(); data = { sha: SHA, owner: 'okonkwe', @@ -167,13 +198,15 @@ describe("updateStatus", () => { }; }); + afterEach(() => { + nock.cleanAll(); + }); + it('updateStatus should post to given endpoint', (done) => { - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .reply(201, {id: APP_ID}); scope.post(`/repos/${data['owner']}/${data['repo']}/statuses/${data['sha']}`).reply(201); updateStatus(data).then(() => { expect(spy.calledOnce).true; - scope.isDone(); + scope.done(); done(); }); }); @@ -190,8 +223,6 @@ describe("updateStatus", () => { body.description.length <= 140 && body.context === data.context; }; - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .reply(201, {id: APP_ID}); scope.post(uri, requestBodyMatcher) .matchHeader('authorization', 'token #t0k3N') .reply(201); @@ -199,7 +230,7 @@ describe("updateStatus", () => { // Run updateStatus(data, url).then(() => { expect(spy.calledOnce).true; - scope.isDone(); + scope.done(); done(); }); }); @@ -236,28 +267,7 @@ describe("Github event handler callback", () => { var evt; // A payload event loaded from fixtures var sandbox; // Sandbox for spying on queue - /** - * This fixture ensures the `token` variable is not null. - */ - async function setToken() { - scope = nock('https://api.github.com'); - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .reply(201, {id: APP_ID}); - scope.post(`/app/installations/${APP_ID}/access_tokens`) - .reply(201, { - token: '#t0k3N', - permissions: { - checks: "write", - metadata: "read", - contents: "read" - }, - }); - await setAccessToken(); - nock.cleanAll() - } - - before(async function () { - await setToken(); + before(function () { scope = nock('https://api.github.com', { reqheaders: { accept: 'application/vnd.github.machine-man-preview+json', @@ -265,36 +275,39 @@ describe("Github event handler callback", () => { }); }); - beforeEach(function () { + beforeEach(async function () { queue.process(async (_job, _done) => {}) // nop sandbox = sinon.createSandbox() + await mockToken(sandbox); evt = JSON.parse(fs.readFileSync('./test/fixtures/pull_payload.json')); }); it('test callback adds pending jobs', (done) => { + let nCalls = 0; let pr = evt.pull_request; let uri = `/repos/${pr.head.repo.owner.login}/${pr.head.repo.name}/statuses/${pr.head.sha}`; - scope.post(uri, body => { return body.state === 'pending'}) + let testable = body => { + nCalls += 1; + if (nCalls === 2) { done(); } + return body.state === 'pending'; + }; + scope.post(uri, testable) .twice() .reply(201, {}); sandbox.spy(queue); - eventCallback({payload: evt, event: 'pull_request'}).then(function() { - expect(queue.pile.length).eq(2); // Two jobs should have been added - let data = queue.pile.pop().data; // Last added - let context = config.events.pull_request.checks; - expect(data.sha).eq(pr.head.sha); // Check head commit set - expect(data.base).eq(pr.base.sha); // Check base commit set - expect(data.force).not.true; // Check force is false (the previous job will save its results) - expect(data.owner).eq(pr.head.repo.owner.login); // Check repo owner set - expect(data.repo).eq(pr.head.repo.name); // Check repo name set - - expect(data.context.startsWith(context.pop())).true; - sandbox.assert.calledTwice(queue.add); - expect(queue.pile.pop().data.force).true; - - scope.isDone(); - done(); - }); + eventCallback({payload: evt, 
event: 'pull_request'}); + expect(queue.pile.length).eq(2); // Two jobs should have been added + let data = queue.pile.pop().data; // Last added + let context = config.events.pull_request.checks; + expect(data.sha).eq(pr.head.sha); // Check head commit set + expect(data.base).eq(pr.base.sha); // Check base commit set + expect(data.force).not.true; // Check force is false (the previous job will save its results) + expect(data.owner).eq(pr.head.repo.owner.login); // Check repo owner set + expect(data.repo).eq(pr.head.repo.name); // Check repo name set + + expect(data.context.startsWith(context.pop())).true; + sandbox.assert.calledTwice(queue.add); + expect(queue.pile.pop().data.force).true; }); it('test event type error', (done) => { @@ -342,6 +355,8 @@ describe("Github event handler callback", () => { queue.pile = []; sandbox.restore(); }); + + after(() => { nock.cleanAll(); }); }); @@ -364,6 +379,11 @@ describe('shields callback', () => { }; }); + after(() => { + nock.cleanAll(); + queue.pile = []; // ensure queue is empty + }); + it('expect coverage response', (done) => { // Set up response to GitHub API query // GET /repos/:owner/:repo/git/refs/heads/:branch @@ -380,7 +400,7 @@ describe('shields callback', () => { .expect('Content-Type', 'application/json') .expect(200) .end(function (err, res) { - scope.isDone(); + scope.done(); if (err) return done(err); expect(res.body).deep.keys([ 'schemaVersion', @@ -400,7 +420,7 @@ describe('shields callback', () => { .get(`/coverage/${info.repo}/${info.branch}`) .expect(404) .end(function (err) { - scope.isDone(); + scope.done(); if (err) return done(err); done(); }); @@ -422,7 +442,7 @@ describe('shields callback', () => { .expect('Content-Type', 'application/json') .expect(200) .end(function (err, res) { - scope.isDone(); + scope.done(); if (err) return done(err); expect(res.body).deep.keys([ 'schemaVersion', @@ -461,7 +481,7 @@ describe('logs endpoint', () => { beforeEach(function () { scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${SHA}`) - .reply(200, { sha: SHA }); + .reply(200, { sha: SHA }); }) it('expect HTML log', (done) => { @@ -499,6 +519,14 @@ describe('logs endpoint', () => { done(); }); }); + + afterEach(() => { + nock.cleanAll(); + }); + + after(() => { + sinon.restore(); + }); }); @@ -513,6 +541,10 @@ describe('fetchCommit', () => { scope = nock('https://api.github.com'); }); + after(function () { + nock.cleanAll(); + }); + it('expect full SHA from short id', (done) => { const id = SHA.slice(0, 7); scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${id}`) @@ -521,6 +553,7 @@ describe('fetchCommit', () => { fetchCommit(id) .then(id => { expect(id).eq(SHA); + scope.done(); done(); }); }); @@ -538,6 +571,7 @@ describe('fetchCommit', () => { fetchCommit(branch, true, repo) .then(id => { expect(id).eq(SHA); + scope.done(); done(); }); }); @@ -556,6 +590,11 @@ describe('records endpoint', () => { scope = nock('https://api.github.com'); }); + after(function () { + nock.cleanAll(); + }); + + it('expect JSON log', (done) => { scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${SHA}`) .reply(200, { sha: SHA }); @@ -568,6 +607,7 @@ describe('records endpoint', () => { if (err) return done(err); const record = JSON.parse(res.text); expect(record.commit).eq(SHA); + scope.done(); done(); }); }); @@ -585,6 +625,7 @@ describe('records endpoint', () => { if (err) return done(err); const record = JSON.parse(res.text); expect(record.commit).eq(SHA); + scope.done(); 
done(); }); }); @@ -600,6 +641,7 @@ describe('records endpoint', () => { .end(function (err, res) { if (err) return done(err); expect(res.text).contains('not found'); + scope.done(); done(); }); }); @@ -622,6 +664,7 @@ describe('records endpoint', () => { if (err) return done(err); const record = JSON.parse(res.text); expect(record.commit).eq(SHA); + scope.done(); done(); }); }); @@ -657,7 +700,7 @@ describe('coverage endpoint', () => { request(srv) .get(`/${ENDPOINT}/coverage/${SHA}/`) // trailing slash essential .expect(200) - .end(function (err, res) { + .end(function (err) { err? done(err) : done(); }); }); @@ -780,18 +823,11 @@ describe('running tests', () => { */ describe('srv github/', () => { var scope; // Our server mock - var clock; // Our clock mock for replicable JWT + var spy; // Token AppAuth spy - before(function() { - // https://runkit.com/gr2m/reproducable-jwt - clock = sinon.useFakeTimers({ - now: 0, - toFake: ['Date'] - }); - }); - - beforeEach(function() { + beforeEach(async function() { // Mock for App.installationAccessToken + spy = await mockToken(); scope = nock('https://api.github.com', { reqheaders: { accept: 'application/vnd.github.machine-man-preview+json', @@ -800,31 +836,16 @@ describe('srv github/', () => { }); it('expect skipped', (done) => { - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`).reply(200); - scope.post(`/app/installations/${APP_ID}/access_tokens`).reply(200); - request(srv) .post(`/github`) // trailing slash essential .set({'X-GitHub-Event': 'issues'}) - .end(function (err, res) { - expect(scope.isDone()).not.true; + .end(function (err) { + expect(spy.called).false; err ? done(err) : done(); }); }); it('expect error caught', (done) => { - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .reply(201, {id: APP_ID}); - scope.post(`/app/installations/${APP_ID}/access_tokens`) - .reply(201, { - token: '#t0k3N', - permissions: { - checks: "write", - metadata: "read", - contents: "read" - }, - }); - request(srv) .post(`/github`) // trailing slash essential .set({ @@ -856,8 +877,9 @@ describe('srv github/', () => { }); - after(function () { - clock.restore(); + afterEach(function () { + spy.restore(); + nock.cleanAll(); }); }); @@ -872,13 +894,9 @@ describe('queue finish callback', () => { var scope; // Our server mock var spy; // A spy for authentication - before(function() { - // Mock for App.installationAccessToken + before(async function() { scope = nock('https://api.github.com'); - const token = {token: '#t0k3N'}; - spy = sinon.stub(appAuth, 'createAppAuth').returns(async () => token); - scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/installation`) - .reply(201, {id: APP_ID}); + spy = await mockToken(); }); it('test error handling', (done) => { @@ -905,5 +923,7 @@ describe('queue finish callback', () => { after(function() { delete queue.process; + nock.cleanAll(); + spy.restore(); }); }); From 472b5e17d2f41cd55f07e8103fef605e73928211 Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Fri, 26 Feb 2021 15:25:52 +0200 Subject: [PATCH 05/50] Issue #57 --- lib.js | 20 ++++++++++++++++++-- serve.js | 3 +++ test/lib.test.js | 28 ++++++++++++++++++++++++---- 3 files changed, 45 insertions(+), 6 deletions(-) diff --git a/lib.js b/lib.js index a5b245a..8f7c919 100644 --- a/lib.js +++ b/lib.js @@ -149,7 +149,7 @@ function updateJobFromRecord(job) { job.data['status'] = rec['status']; job.data['description'] = rec['description']; job.data['coverage'] = ('coverage' in 
rec)? rec['coverage'] : null; - if (!job.data['coverage']) { + if (!job.data['coverage'] && rec['status'] !== 'error') { log('Coverage missing, computing from XML'); computeCoverage(job); // Attempt to load from XML } else if ((job.data.context || '').startsWith('coverage')) { @@ -177,6 +177,22 @@ function partial(func) { } +/** + * Append URL parameters to a URL. + * @param {String} url - The URL to append paramters to. + * @param {String} args - One or more URL parameters to append, e.g. 'param=value' + */ +function addParam(url, ...args) { + if (url.indexOf('&') === -1 && !url.endsWith('/')) { + url += '/' + } + for (param of args) { + url += (/\?/g.test(url)? '&' : '?') + param; + } + return url +} + + /** * Check if job already has record, if so, update from record and finish, otherwise call tests function. * @param {Object} job - Job object which is being processed. @@ -477,5 +493,5 @@ class APIError extends Error { module.exports = { ensureArray, loadTestRecords, compareCoverage, computeCoverage, getBadgeData, log, shortID, openTunnel, APIError, queue, partial, startJobTimer, updateJobFromRecord, shortCircuit, isSHA, - fullpath, strToBool, saveTestRecords, listSubmodules, getRepoPath + fullpath, strToBool, saveTestRecords, listSubmodules, getRepoPath, addParam } diff --git a/serve.js b/serve.js index 69d47f2..b646c7e 100644 --- a/serve.js +++ b/serve.js @@ -405,6 +405,9 @@ async function updateStatus(data, targetURL = '') { debug('Updating status to "%s" for %s @ %g', data['status'], (data['context'] || '').split('/').pop(), data['sha']); await setAccessToken(); + if (targetURL && data['repo'] !== process.env['REPO_NAME']) { + targetURL = lib.addParam(targetURL, 'module=' + data['repo']); + } return request("POST /repos/:owner/:repo/statuses/:sha", { owner: data['owner'] || process.env['REPO_OWNER'], repo: data['repo'] || process.env['REPO_NAME'], diff --git a/test/lib.test.js b/test/lib.test.js index cbf837a..7b1a9f9 100644 --- a/test/lib.test.js +++ b/test/lib.test.js @@ -54,8 +54,8 @@ describe('Test partial:', function() { it('expect curried function', function () { let f = (a, b) => { return a + b; }; let f0 = lib.partial(f); - expect(f0(2)).instanceOf(Function) - expect(f0(2, 2)).eq(4) + expect(f0(2)).instanceOf(Function); + expect(f0(2, 2)).eq(4); }); }); @@ -65,8 +65,28 @@ describe('Test partial:', function() { */ describe('Test getRepoPath:', function() { it('expect returned from env', function () { - let repoPath = lib.getRepoPath() - expect(repoPath).eq(process.env.REPO_PATH) + let repoPath = lib.getRepoPath(); + expect(repoPath).eq(process.env.REPO_PATH); + }); +}); + + +/** + * A test for the function addParam + */ +describe('Test addParam:', function() { + it('expect deals with slash', function () { + const url = 'https://example.com'; + const param = 'param=value'; + expect(lib.addParam(url, param)).eq(lib.addParam(url + '/', param)); + }); + + it('expect handles multiple params', function () { + const url = 'https://example.com'; + const param1 = 'param=value'; + const param2 = 'par=val'; + const expected = 'https://example.com/?param=value&par=val'; + expect(lib.addParam(url, param1, param2)).eq(expected); }); }); From 780f3b2a136f0c9abbba2a2cf06872957318bf0c Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Fri, 5 Mar 2021 13:55:51 +0200 Subject: [PATCH 06/50] Pipe stderr to file --- serve.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/serve.js b/serve.js index b646c7e..3a6767f 100644 --- a/serve.js +++ b/serve.js @@ -271,7 +271,7 @@ 
function runTests(job) { let fcn = lib.fullpath(config.test_function); debug('starting test child process %s', fcn); let ops = config.shell? {'shell': config.shell} : {}; - const runTests = cp.execFile(fcn, [sha, repoPath, config.dataPath], ops, (error, stdout, stderr) => { + const runTests = cp.spawn(fcn, [sha, repoPath, config.dataPath], ops, (error, stdout, stderr) => { debug('clearing job timer'); clearTimeout(timer); delete job.data.process; @@ -314,8 +314,10 @@ function runTests(job) { // Write output to file runTests.stdout.pipe(process.stdout); // Pipe to display + runTests.stderr.pipe(process.stderr); let logDump = fs.createWriteStream(logName, { flags: 'a' }); - runTests.stdout.pipe(logDump); + runTests.stdout.pipe(logDump); // Pipe to file + runTests.stderr.pipe(logDump); runTests.on('exit', () => { logDump.close(); }); return runTests; } @@ -347,10 +349,12 @@ function prepareEnv(job, callback) { callback(job); }); prepEnv.stdout.pipe(process.stdout); + prepEnv.stderr.pipe(process.stderr); fs.mkdir(path.join(logDir), { recursive: true }, (err) => { if (err) throw err; let logDump = fs.createWriteStream(logName, { flags: 'w' }); prepEnv.stdout.pipe(logDump); + prepEnv.stderr.pipe(logDump); prepEnv.on('exit', () => { logDump.close(); }); }); return prepEnv; From f22af6ea4d9606cc271c2b5b67780ac102894a67 Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Fri, 12 Mar 2021 16:28:02 +0200 Subject: [PATCH 07/50] Coverage from SHA; job routine --- config/config.js | 5 +- config/settings.json | 6 +- lib.js | 24 +++- main.js | 6 +- serve.js | 291 +++++++++++++++++++++++-------------------- test/lib.test.js | 21 +++- test/serve.test.js | 271 ++++++++++++++++++++++++++-------------- 7 files changed, 378 insertions(+), 246 deletions(-) diff --git a/config/config.js b/config/config.js index 10d3c8b..ab73468 100644 --- a/config/config.js +++ b/config/config.js @@ -34,8 +34,6 @@ defaults = { testing = { listen_port: 3000, timeout: 60000, - setup_function: 'prep_env.BAT', - test_function: "run_tests.BAT", events: { push: { checks: "continuous-integration", @@ -47,6 +45,9 @@ testing = { ref_ignore: ["documentation", "gh-pages"] } }, + routines: { + '*': ["prep_env.BAT", "run_tests.BAT"] + }, dataPath: fixtureDir, dbFile: path.join(fixtureDir, dbFilename) // cache of test results } diff --git a/config/settings.json b/config/settings.json index d48b970..070aea7 100644 --- a/config/settings.json +++ b/config/settings.json @@ -1,9 +1,6 @@ { - "setup_function": "prep_env.BAT", - "test_function": "run_tests.BAT", "listen_port": 3000, "timeout": 480000, - "program": "python", "strict_coverage": false, "events": { "push": { @@ -15,5 +12,8 @@ "actions": ["opened", "synchronize", "reopened"], "ref_ignore": ["documentation", "gh-pages"] } + }, + "routines": { + "*": ["prep_env.BAT", "run_tests.BAT"] } } diff --git a/lib.js b/lib.js index 8f7c919..a60c7d6 100644 --- a/lib.js +++ b/lib.js @@ -73,6 +73,22 @@ function ensureArray(x) { return (Array.isArray(x))? x : [x]; } function strToBool(s) { return /^\s*(true|1|on)\s*$/i.test(s); } +/** + * Get the routine for a given context from the settings JSON. + * @param {String} context - The context. + * @returns {Array} The test routine, i.e. an array of functions/scripts to call + */ +function context2routine(context) { + const opts = ('routines' in config)? config['routines'] : null; + if (!opts) { return null; } + let routine = ('*' in opts)? 
opts['*'] : []; + if (context in opts) { + routine += ensureArray(opts[context]); + } + return routine; +} + + /** * Load test results from .db.json file. NB: Size and order of returned records not guaranteed * @param {string, array} id - Commit SHA. @@ -105,7 +121,7 @@ async function saveTestRecords(r) { throw new APIError('"commit" not in record(s)'); } try { - let data = await fs.promises.readFile(config.dbFile, 'utf8') + let data = await fs.promises.readFile(config.dbFile, 'utf8'); obj = ensureArray(JSON.parse(data)); let ids = r.map(x => x.commit); let records = obj.filter(o => ids.indexOf(o.commit) >= 0); @@ -179,7 +195,7 @@ function partial(func) { /** * Append URL parameters to a URL. - * @param {String} url - The URL to append paramters to. + * @param {String} url - The URL to append parameters to. * @param {String} args - One or more URL parameters to append, e.g. 'param=value' */ function addParam(url, ...args) { @@ -196,7 +212,7 @@ function addParam(url, ...args) { /** * Check if job already has record, if so, update from record and finish, otherwise call tests function. * @param {Object} job - Job object which is being processed. - * @param {Function} func - The tests function to run, e.g. `runTests`. + * @param {Function} func - The tests function to run, e.g. `buildRoutine`. */ function shortCircuit(job, func=null) { // job.data contains the custom data passed when the job was created @@ -493,5 +509,5 @@ class APIError extends Error { module.exports = { ensureArray, loadTestRecords, compareCoverage, computeCoverage, getBadgeData, log, shortID, openTunnel, APIError, queue, partial, startJobTimer, updateJobFromRecord, shortCircuit, isSHA, - fullpath, strToBool, saveTestRecords, listSubmodules, getRepoPath, addParam + fullpath, strToBool, saveTestRecords, listSubmodules, getRepoPath, addParam, context2routine } diff --git a/main.js b/main.js index 4535440..ebd11d6 100644 --- a/main.js +++ b/main.js @@ -4,10 +4,9 @@ * @todo save auxiliary configuration into a separate config file * @todo add abort option for when new commits added * @todo rename context to description and use context to track posts - * @todo fix intentions */ const { openTunnel, queue, shortCircuit} = require('./lib'); -const { srv, handler, eventCallback, runTests, prepareEnv} = require('./serve'); +const { srv, handler, eventCallback, buildRoutine} = require('./serve'); const config = require("./config/config").settings; @@ -15,8 +14,7 @@ const config = require("./config/config").settings; * Build queue processing pipeline. The shortCircuit call checks whether the results may be loaded from file, * bypassing the test function. */ -const run = (job) => { prepareEnv(job, runTests); }; -queue.process((job) => { shortCircuit(job, run); }); +queue.process((job) => { shortCircuit(job, buildRoutine); }); // NB: Only the supported events make it this far (i.e. push and pull requests) handler.on('*', evt => eventCallback(evt)); diff --git a/serve.js b/serve.js index 3a6767f..a1bb27f 100644 --- a/serve.js +++ b/serve.js @@ -8,7 +8,6 @@ const cp = require('child_process'); const express = require('express'); const srv = express(); -const shell = require('shelljs'); const app = require("@octokit/auth-app"); const { request } = require('@octokit/request'); @@ -233,25 +232,32 @@ srv.get(`/${ENDPOINT}/:id`, function (req, res) { * Serve the coverage results and build status for the shields.io coverage badge API. Attempts to * load the test results from file and if none exist, adds a new job to the queue. 
*/ -srv.get('/:badge/:repo/:branch', async (req, res) => { +srv.get('/:badge/:repo/:id', async (req, res) => { + const context = req.params.badge === 'status' ? 'build' : req.params.badge; const data = { owner: process.env['REPO_OWNER'], repo: req.params.repo, - branch: req.params.branch, + routine: lib.context2routine(context) + }; + // Check we have a matching routine + if (!data.routine) { + console.error(`No routine for "${context}" context`); + return res.sendStatus(404); } + let isSHA = lib.isSHA(req.params.id); // Find head commit of branch - return request('GET /repos/:owner/:repo/git/refs/heads/:branch', data) - .then(response => { - data['context'] = req.params.badge === 'status' ? 'build' : req.params.badge; - data['sha'] = response.data.object.sha; + return fetchCommit(req.params.id, !isSHA, req.params.repo) + .then(id => { + data['context'] = context; + data['sha'] = id; data['force'] = req.query.force === '' || lib.strToBool(req.query.force); - console.log(`Request for ${data.branch} ${data.context}`) + console.log(`Request for ${req.params.id} ${data.context}`) const report = lib.getBadgeData(data); // TODO If pending return 201, else 200 // Send report res.setHeader('Content-Type', 'application/json'); res.end(JSON.stringify(report));}) .catch(err => { // Specified repo or branch not found - console.error(`${data.owner}/${data.repo}/${data.branch} not found`) + console.error(`${data.owner}/${data.repo}/${req.params.id} not found`) res.sendStatus((err.status === 404) ? 404 : 500) }); }); @@ -259,134 +265,149 @@ srv.get('/:badge/:repo/:branch', async (req, res) => { ///////////////////// QUEUE EVENTS ///////////////////// -function runTests(job) { - const debug = log.extend('runTests'); - debug('starting job timer'); - const timer = lib.startJobTimer(job, config.kill_children === true); - - // Go ahead with tests - const sha = job.data['sha']; - const repoPath = lib.getRepoPath(job.data.repo); - const logName = path.join(config.dataPath, 'reports', sha, `std_output-${lib.shortID(sha)}.log`); - let fcn = lib.fullpath(config.test_function); - debug('starting test child process %s', fcn); - let ops = config.shell? {'shell': config.shell} : {}; - const runTests = cp.spawn(fcn, [sha, repoPath, config.dataPath], ops, (error, stdout, stderr) => { - debug('clearing job timer'); - clearTimeout(timer); - delete job.data.process; - if (error) { // Send error status - let message; - if (error.killed || error.signal === 'SIGTERM') { - message = `Tests stalled after ~${(config.timeout / 60000).toFixed(0)} min`; +/** + * Build task pipeline. Takes a list of scripts/functions and builds a promise chain. + * @param {Object} job - The path of the repository + * @returns {Promise} - The job routine + */ +async function buildRoutine(job) { + const debug = log.extend('pipeline'); + const data = job.data; + // Get task list from job data, or from context if missing + const tasks = data.routine? 
lib.ensureArray(data.routine) : lib.context2routine(data.context); + // Throw an error if there is no routine defined for this job + if (!tasks) { throw new Error(`No routine defined for context ${data.context}`); } + + debug('Building routine for job #%g', job.id); + // variables shared between functions + const repoPath = lib.getRepoPath(data.repo); + const sha = data['sha']; + const logDir = path.join(config.dataPath, 'reports', sha); + const logName = path.join(logDir, `std_output-${lib.shortID(sha)}.log`); + await fs.promises.mkdir(logDir, { recursive: true }); + const logDump = fs.createWriteStream(logName, { flags: 'w' }); + logDump.on('close', () => debug('Closing log file')); + const ops = config.shell? {'shell': config.shell} : {}; + + const init = () => debug('Executing pipeline for job #%g', job.id); + const routine = tasks.reduce(applyTask, Promise.resolve().then(init)); + return routine + .then(updateJob) + .catch(handleError) + .finally(() => logDump.close()) + + /** + * Build task pipeline. Should recursively call functions to produce chain of spawn callbacks. + * Must return promises. + * @param {Promise} pipeline - The promise chain to add to + * @param {String} task - The script + * @param {Number} idx - The current index in the pipeline + * @param {Array} taskList - An array of functions or scripts to execute consecutively + * @returns {Promise} - The job routine with `task` added to it. + */ + function applyTask(pipeline, task, idx, taskList) { + return pipeline.then(() => { + debug('Starting task "%s" (%i/%i)', task, idx + 1, taskList.length); + const timer = lib.startJobTimer(job, config.kill_children === true); + task = lib.fullpath(task); // Ensure absolute path + return new Promise(function (resolve, reject) { + // Spawn a process to execute our task + const child = cp.spawn(task, [sha, repoPath, logDir], ops); + let stdout = '', stderr = ''; + // Pipe output to log file + child.stdout.pipe(logDump, { end: false }); + child.stderr.pipe(logDump, { end: false }); + // Keep output around for reporting errors + child.stdout.on('data', chunk => { stdout += chunk; }); + child.stderr.on('data', chunk => { stderr += chunk; }); + // error emitted called when spawn itself fails, or process could not be killed + child.on('error', err => { + debug('clearing job timer'); + clearTimeout(timer); + reject(err);}) + .on('exit', () => { + debug('clearing job timer'); + clearTimeout(timer);}) + .on('close', (code, signal) => { + const callback = (code === 0)? resolve : reject; + const proc = { + code: code, + signal: signal, + stdout: stdout, + stderr: stderr, + process: child + }; + callback(proc); + }); + job.data.process = child; // Assign the child process to the job + }); + }); + } + + /** + * Handle any errors raised during the job routine. If any process exits with a non-zero code + * this handler will divine the error, update the record and trigger the relevant job callbacks. + * @param {Object} errored - The stdout, stderr, ChildProcess, exit code and signal, + * or a childProcess Error object. + */ + function handleError(errored) { + let message; // Error message to pass to job callbacks and to save into records + // The script that threw the error + const file = (errored instanceof Error)? errored.path : errored.process.spawnfile; + delete job.data.process; // Remove the process from the job data + + // Check if the error is a spawn error, this is thrown when spawn itself fails, i.e. 
due to + // missing shell script + if (errored instanceof Error) { + if (errored.code === 'ENOENT') { + // Note the missing file (not necessarily the task script that's missing) + message = file? `File "${file}" not found` : 'No such file or directory'; } else { - debug('error from test function: %o', error) - // Isolate error from log - // For MATLAB return the line that begins with 'Error' - let fn = (str) => { return str.startsWith('Error in \'') }; - message = stderr.split(/\r?\n/).filter(fn).join(';'); - // For Python, cat from the lost line that doesn't begin with whitespace - if (!message) { - let errArr = stderr.split(/\r?\n/); - let idx = errArr.reverse().findIndex(v => {return v.match('^\\S')}); - message = stderr.split(/\r?\n/).slice(-idx-1).join(';'); - } - if (!message) { message = error.code; } + message = `${errored.code} - Failed to spawn ${file}`; } - // Save error into records for future reference. NB: This is currently not done for prepEnv errors - let report = { - 'commit': sha, - 'results': message, - 'status': 'error', - 'description': 'Error running ' + (config.test_function || 'test function') - }; - lib.saveTestRecords(report).then(() => { debug('updated test records'); }); - job.done(new Error(message)); // Propagate - } else { - if (!lib.updateJobFromRecord(job)) { - job.done(new Error('Failed to return test result')); - } else { - job.done(); + // Check if the process was killed (we'll assume by the test timeout callback) + } else if (errored.process.killed || errored.signal === 'SIGTERM') { + message = `Tests stalled after ~${(config.timeout / 60000).toFixed(0)} min`; + } else { // Error raised by process; dig through stdout for reason + debug('error from test function %s', file) + // Isolate error from log + // For MATLAB return the line that begins with 'Error' + let fn = (str) => { return str.startsWith('Error in \'') }; + message = errored.stderr.split(/\r?\n/).filter(fn).join(';'); + // For Python, cat from the lost line that doesn't begin with whitespace + if (!message && errored.stderr.includes('Traceback ')) { + let errArr = errored.stderr.split(/\r?\n/); + let idx = errArr.reverse().findIndex(v => {return v.match('^\\S')}); + message = errored.stderr.split(/\r?\n/).slice(-idx-1).join(';'); } + // Otherwise simply use the full stderr (will be truncated) + if (!message) { message = errored.stderr; } } - }); - job.data.process = runTests; - - // Write output to file - runTests.stdout.pipe(process.stdout); // Pipe to display - runTests.stderr.pipe(process.stderr); - let logDump = fs.createWriteStream(logName, { flags: 'a' }); - runTests.stdout.pipe(logDump); // Pipe to file - runTests.stderr.pipe(logDump); - runTests.on('exit', () => { logDump.close(); }); - return runTests; -} - -function prepareEnv(job, callback) { - log('Preparing environment for job #%g', job.id) - const repoPath = lib.getRepoPath(job.data.repo); - switch (config.setup_function) { - case undefined: - // run some basic git commands - checkout(repoPath, job.data.sha); - return callback(job); - case null: // No prep required - return callback(job); - default: - const sha = job.data['sha']; - const logDir = path.join(config.dataPath, 'reports', sha); - const logName = path.join(logDir, `std_output-${lib.shortID(sha)}.log`); - log('Calling %s with args %o', config.setup_function, [sha, repoPath, logName]); - let fcn = lib.fullpath(config.setup_function); - let ops = config.shell? 
{'shell': config.shell} : {}; - const prepEnv = cp.execFile(fcn, [sha, repoPath, logDir], ops, (err, stdout, stderr) => { - if (err) { - let errmsg = (err.code === 'ENOENT')? `File "${fcn}" not found` : err.code; - console.error('Checkout failed: ' + (stderr || errmsg)); - job.done(new Error(`Failed to prepare env: ${stderr || errmsg}`)); // Propagate error - return; - } - callback(job); - }); - prepEnv.stdout.pipe(process.stdout); - prepEnv.stderr.pipe(process.stderr); - fs.mkdir(path.join(logDir), { recursive: true }, (err) => { - if (err) throw err; - let logDump = fs.createWriteStream(logName, { flags: 'w' }); - prepEnv.stdout.pipe(logDump); - prepEnv.stderr.pipe(logDump); - prepEnv.on('exit', () => { logDump.close(); }); - }); - return prepEnv; + // Save error into records for future reference. + let report = { + 'commit': sha, + 'results': message, + 'status': 'error', + 'description': 'Error running ' + (file || 'test routine') + }; + lib.saveTestRecords(report).then(() => { debug('updated test records'); }); + job.done(new Error(message)); // Propagate } -} -/** - * Checkout Git repository. - * @param {String} repoPath - The path of the repository - * @param {String} ref - A commit SHA or branch name - * @todo Add error handling - */ -function checkout(repoPath, ref) { - if (!shell.which('git')) { throw new Error('Git not found on path'); } - let verify = (cmd) => { if (!cmd) { - shell.popd(); - throw new Error('Failed to checkout: ' + cmd.stderr); - } }; - if (!shell.pushd(repoPath)) { - shell.mkdir(path.resolve(repoPath + path.sep + '..')); - shell.pushd(repoPath); - verify(shell.exec(`git clone https://github.com/${env.process['REPO_OWNER']}/${env.process['REPO_NAME']}.git`)); - verify(shell.exec(`git checkout ${ref}`)); - } else { - verify(shell.exec('git fetch -a')); - verify(shell.exec('git reset --hard HEAD')); - verify(shell.exec(`git checkout ${ref}`)); - verify(shell.exec('git submodule update --init --recursive')); - verify(shell.exec('git submodule foreach git reset --hard HEAD')); - verify(shell.exec('git status')); + /** + * Update the job and mark complete. Called when job routine completes without error. + * @param {Object} proc - The stdout, stderr, ChildProcess, exit code and signal + */ + function updateJob(proc) { + debug('Job routine complete'); + delete job.data.process; // Remove process from job data + // Attempt to update the job data from the JSON records, throw error if this fails + if (!lib.updateJobFromRecord(job)) { + job.done(new Error('Failed to return test result')); + } else { + job.done(); // All good + } } - shell.popd(); } @@ -435,7 +456,6 @@ async function updateStatus(data, targetURL = '') { * Payload reference https://developer.github.com/webhooks/event-payloads/ * @param {Object} event - The GitHub event object. * @todo Save full coverage object for future inspection - * @todo Add support for ignore list for specific actions * @todo Add support for regex in branch ignore list */ async function eventCallback (event) { @@ -451,7 +471,8 @@ async function eventCallback (event) { repo: event.payload.repository.name, // The repository name status: 'pending', // The check state to update our context with description: null, // A brief description of what transpired - context: null // The precise check name, keeps track of what check we're doing + context: null, // The precise check name, keeps track of what check we're doing + routine: null // A list of scripts call call } // Double-check the event was intended for our app. 
This is also done using the headers before @@ -527,6 +548,7 @@ async function eventCallback (event) { // Copy job data and update check specific fields let data = Object.assign({}, job_template); data.context = `${check}/${process.env['USERDOMAIN'] || process.env['NAME']}` + data.routine = lib.context2routine(check); switch (check) { case 'coverage': data.description = 'Checking coverage'; @@ -570,9 +592,10 @@ queue.on('finish', (err, job) => { // On job end post result to API var target = ''; // We will only update the endpoint for coverage jobs console.log(`Job #${lib.shortID(job.id)} finished` + (err ? ' with error' : '')); if (job.data.skipPost === true) { return; } + let context = job.data.context || ''; // Update target URL - if (!job.data.skipPost && job.data.context.startsWith('coverage')) { + if (!job.data.skipPost && context.startsWith('coverage')) { // No URL for coverage if errored target = err? '' : `${process.env['WEBHOOK_PROXY_URL']}/${ENDPOINT}/coverage/${job.data.sha}`; } else { @@ -594,5 +617,5 @@ queue.on('finish', (err, job) => { // On job end post result to API }); module.exports = { - updateStatus, srv, handler, setAccessToken, prepareEnv, runTests, eventCallback, fetchCommit + updateStatus, srv, handler, setAccessToken, eventCallback, fetchCommit, buildRoutine } diff --git a/test/lib.test.js b/test/lib.test.js index 7b1a9f9..720b602 100644 --- a/test/lib.test.js +++ b/test/lib.test.js @@ -76,9 +76,12 @@ describe('Test getRepoPath:', function() { */ describe('Test addParam:', function() { it('expect deals with slash', function () { - const url = 'https://example.com'; + let url = 'https://example.com'; const param = 'param=value'; expect(lib.addParam(url, param)).eq(lib.addParam(url + '/', param)); + url += '/foo'; + expect(lib.addParam(url, param)).eq(url + '/?' + param); + expect(lib.addParam(url, param)).eq(lib.addParam(url + '/', param)); }); it('expect handles multiple params', function () { @@ -91,6 +94,18 @@ describe('Test addParam:', function() { }); +/** + * A test for the function context2routine + */ +describe('Test context2routine:', function() { + it('expect returns default', function () { + const context = 'anything'; + const expected = config['routines']['*']; + expect(lib.context2routine(context)).eq(expected); + }); +}); + + /** * A test for the function compareCoverage. 
* @todo add test for strict compare @@ -379,7 +394,7 @@ describe("getBadgeData function", () => { // Failed tests input['sha'] = ids[0]; - input['context'] = 'status'; + input['context'] = 'build'; data = lib.getBadgeData(input); expected = { schemaVersion: 1, @@ -422,7 +437,7 @@ describe("getBadgeData function", () => { it('Check force flag', function () { input['sha'] = ids[1]; - input['context'] = 'status'; + input['context'] = 'build'; input['force'] = true; // set force flag to true const expected = { schemaVersion: 1, diff --git a/test/serve.test.js b/test/serve.test.js index 7a16da7..ca18da0 100644 --- a/test/serve.test.js +++ b/test/serve.test.js @@ -10,9 +10,7 @@ const assert = require('chai').assert; const appAuth = require("@octokit/auth-app"); const APIError = require('../lib').APIError; -const lib = require('../lib'); -const { updateStatus, setAccessToken, eventCallback, srv, prepareEnv, runTests, fetchCommit} = - require('../serve'); +const { updateStatus, setAccessToken, eventCallback, srv, fetchCommit, buildRoutine} = require('../serve'); const queue = require('../lib').queue; const config = require('../config/config').settings; const { stdErr, token } = require('./fixtures/static'); @@ -134,9 +132,9 @@ describe('setAccessToken', () => { }); }); - after(async function() { + after(function(done) { clock.restore(); - await resetToken(); + resetToken().then(done); }) }); @@ -144,7 +142,7 @@ describe('setAccessToken', () => { /** * This tests 'updateStatus' which handles updating the GitHub statues. */ -describe("updateStatus", () => { +describe('updateStatus', () => { var scope; // Our server mock var spy; // A spy for authentication var data; // Some job data to update the status with @@ -183,10 +181,10 @@ describe("updateStatus", () => { data.description = 'Lorem ipsum '.repeat(13); // Check max char data.context = 'ci/test'; const uri = `/repos/${data['owner']}/${data['repo']}/statuses/${data['sha']}`; - const url = `${process.env.WEBHOOK_PROXY_URL}/${ENDPOINT}/${data.sha}`; // target URL + const url = `${process.env.WEBHOOK_PROXY_URL}/${ENDPOINT}/${data.sha}`; const requestBodyMatcher = (body) => { return body.state === data.status && - body.target_url === url && + body.target_url === url + `/?module=${data['repo']}` && body.description.length <= 140 && body.context === data.context; }; @@ -231,7 +229,7 @@ describe("updateStatus", () => { * callback to check whether the event is configured in the settings and if so, should update the * check status to pending for each context, and add each job to the queue. 
*/ -describe("Github event handler callback", () => { +describe('Github event handler callback', () => { var scope; // Our server mock var evt; // A payload event loaded from fixtures var sandbox; // Sandbox for spying on queue @@ -287,6 +285,7 @@ describe("Github event handler callback", () => { expect(data.force).not.true; // Check force is false (the previous job will save its results) expect(data.owner).eq(pr.head.repo.owner.login); // Check repo owner set expect(data.repo).eq(pr.head.repo.name); // Check repo name set + expect(data.routine).eq(config['routines']['*']); // Check routine expect(data.context.startsWith(context.pop())).true; sandbox.assert.calledTwice(queue.add); @@ -364,13 +363,18 @@ describe('shields callback', () => { }; }); + after(function () { + delete queue.process; + queue.pile = []; // ensure queue is empty + }); + it('expect coverage response', (done) => { // Set up response to GitHub API query - // GET /repos/:owner/:repo/git/refs/heads/:branch - scope.get(`/repos/${info.owner}/${info.repo}/git/refs/heads/${info.branch}`) + // GET /repos/:owner/:repo/branches/:branch + scope.get(`/repos/${info.owner}/${info.repo}/branches/${info.branch}`) .reply(200, { ref: `ref/heads/${info.branch}`, - object: { + commit: { sha: SHA } }); @@ -394,7 +398,7 @@ describe('shields callback', () => { it('expect errors', (done) => { // Set up response to GitHub API query - scope.get(`/repos/${info.owner}/${info.repo}/git/refs/heads/${info.branch}`).reply(404); + scope.get(`/repos/${info.owner}/${info.repo}/branches/${info.branch}`).reply(404); request(srv) .get(`/coverage/${info.repo}/${info.branch}`) @@ -406,19 +410,28 @@ describe('shields callback', () => { }); }); - it('expect job forced', (done) => { + // In order for this to work we need to clear the routine defaults from the settings + xit('expect context not found', done => { + request(srv) + .get(`/unknown/${info.repo}/${info.branch}`) + .expect(404) + .end(function (err) { + scope.isDone(); + done(err); + }); + }); + + it('expect job forced', done => { // Set up response to GitHub API query // GET /repos/:owner/:repo/git/refs/heads/:branch - scope.get(`/repos/${info.owner}/${info.repo}/git/refs/heads/${info.branch}`) + scope.get(`/repos/${info.owner}/${info.repo}/commits/${SHA}`) .reply(200, { - ref: `ref/heads/${info.branch}`, - object: { - sha: SHA - } + ref: `ref/heads/${SHA}`, + sha: SHA }); request(srv) - .get(`/coverage/${info.repo}/${info.branch}?force=1`) + .get(`/coverage/${info.repo}/${SHA}?force=1`) .expect('Content-Type', 'application/json') .expect(200) .end(function (err, res) { @@ -662,9 +675,10 @@ describe('coverage endpoint', () => { }); }); - after(function() { + after(done => { fs.rmdir(path.join(config.dataPath, 'reports'), {recursive: true}, err => { if (err) throw err; + done() }) }) @@ -672,105 +686,164 @@ describe('coverage endpoint', () => { /** - * This tests the runtests and prepareEnv functions. - * @todo Check for log close on exit + * This tests the buildRoutine function. 
*/ describe('running tests', () => { var sandbox; // Sandbox for spying on queue - var stub; // Main fileExec stub + var spawnStub; // Main fileExec stub + var execEvent; + var job; + + before(() => { + sandbox = sinon.createSandbox(); + }); beforeEach(function () { - queue.process(async (_job, _done) => {}) // nop - sandbox = sinon.createSandbox() - stub = sandbox.stub(cp, 'execFile'); - sandbox.stub(fs, 'createWriteStream'); - sandbox.stub(fs, 'mkdir').callsArg(2); + spawnStub = sandbox.stub(cp, 'spawn'); execEvent = new events.EventEmitter(); - execEvent.stdout = new events.EventEmitter(); + execEvent.stdout = execEvent.stderr = new events.EventEmitter(); execEvent.stdout.pipe = sandbox.spy(); - stub.returns(execEvent); - }); - - it('test prepareEnv', async () => { - const callback = sandbox.spy(); - stub.callsArgAsync(3, null, 'preparing', ''); - const job = {data: {sha: SHA}}; - await prepareEnv(job, callback); - let log = path.join(config.dataPath, 'reports', SHA, 'std_output-cabe27e.log'); - let fn = path.resolve(path.join(__dirname, '..', 'prep_env.BAT')); - stub.calledWith(fn, [SHA, config.repo, config.dataPath]); - expect(callback.calledOnce).true; - expect(callback.calledOnceWithExactly(job)).true; - sandbox.assert.calledWith(fs.createWriteStream, log); - }); - - it('test prepareEnv with error', async (done) => { - stub.callsArgWith(3, {code: 'ENOENT'}, 'preparing', ''); - const job = { + job = { + id: 123, data: {sha: SHA}, - done: (err) => { - expect(err).instanceOf(Error); - expect(err.message).to.have.string('not found'); - done(); - } + done: () => {} }; - prepareEnv(job); }); - it('test runtests', async () => { - const callback = sandbox.spy(); - stub.callsArgWith(3, null, 'running tests', ''); - const job = { - data: {sha: SHA}, - done: callback - }; - await runTests(job); - const log = path.join(config.dataPath, 'reports', SHA, 'std_output-cabe27e.log'); - sandbox.assert.calledWith(fs.createWriteStream, log, { flags: 'a' }); - const fn = path.resolve(path.join(__dirname, '..', 'run_tests.BAT')); - stub.calledWith(fn, [SHA, config.repo, config.dataPath]); - expect(callback.calledOnce).true; - expect(callback.calledOnceWithExactly()).true; + it('expect default routine', fin => { + // Create a job field with no routine field + job.done = validate; + let log = path.join(config.dataPath, 'reports', SHA, 'std_output-cabe27e.log'); + let tasks = config['routines']['*'].map(x => path.resolve(path.join(__dirname, '..', x))); + spawnStub.callsFake(() => { + setImmediate(() => { execEvent.emit('exit', 0, null); }); + setImmediate(() => { execEvent.emit('close', 0, null); }); + return execEvent; + }); + buildRoutine(job); + function validate(err) { + for (let fn of tasks) { + spawnStub.calledWith(fn, [SHA, config.repo, config.dataPath]); + } + expect(spawnStub.calledTwice).true; + expect(err).undefined; + expect(fs.existsSync(log)).true; + fin(); + } + }); + + it('test missing file error', fin => { + job.done = validate; + + // Raise a file not found error + spawnStub.callsFake(() => { + const err = new Error('ENOENT'); + err.code = 'ENOENT'; + err.path = config['routines']['*'][0]; + setImmediate(() => { execEvent.emit('error', err, null); }); + return execEvent; + }); + sandbox.stub(fs.promises, 'writeFile'); + sandbox.stub(fs.promises, 'readFile').resolves('[]'); + buildRoutine(job).finally(fin); + function validate(err) { + expect(spawnStub.calledOnce).true; + expect(err.message).matches(/File ".*?" 
not found/); + } }); - it('runtests parses MATLAB error', (done) => { + it('runtests parses MATLAB error', (fin) => { var err; const errmsg = 'Error in MATLAB_function line 23'; - stub.callsArgWith(3, {code: 1}, 'running tests', errmsg); - sandbox.stub(fs.promises, 'writeFile').callsFake(() => { - sandbox.assert.calledWith(fs.promises.writeFile, config.dbFile); - expect(err).instanceOf(Error); + job.done = (e) => { err = e; }; + + // Exit with a MATLAB error + spawnStub.callsFake(() => { + setImmediate(() => { execEvent.stderr.emit('data', errmsg) }); + setImmediate(() => { execEvent.emit('exit', 1, null); }); + setImmediate(() => { execEvent.emit('close', 1, null); }); + return execEvent; + }); + sandbox.stub(fs.promises, 'readFile').resolves('[]'); + sandbox.stub(fs.promises, 'writeFile').callsFake((db_path, rec) => { + expect(db_path).eq(config.dbFile); + expect(rec).contains(errmsg); + expect(spawnStub.calledOnce).true; expect(err.message).to.have.string(errmsg); - done(); + fin(); }) - const job = { - data: {sha: SHA}, - done: (e) => { err = e; } - }; - runTests(job); + buildRoutine(job); }); - it('runtests parses Python error', (done) => { + it('runtests parses Python error', (fin) => { var err; - stub.callsArgWith(3, {code: 1}, 'running tests', stdErr); - sandbox.stub(fs.promises, 'writeFile').callsFake(() => { - sandbox.assert.calledWith(fs.promises.writeFile, config.dbFile); - expect(err).instanceOf(Error); - let errmsg = 'FileNotFoundError: Invalid data root folder E:\\FlatIron\\integration'; - expect(err.message.startsWith(errmsg)).true; - done(); + job.done = (e) => { err = e; }; + + // Exit with a Python error + spawnStub.callsFake(() => { + setImmediate(() => { execEvent.stderr.emit('data', stdErr) }); + setImmediate(() => { execEvent.emit('exit', 1, null); }); + setImmediate(() => { execEvent.emit('close', 1, null); }); + return execEvent; + }); + sandbox.stub(fs.promises, 'readFile').resolves('[]'); + sandbox.stub(fs.promises, 'writeFile').callsFake((db_path, rec) => { + expect(db_path).eq(config.dbFile); + let errmsg = 'FileNotFoundError: Invalid data root folder '; + expect(rec).contains(errmsg); + expect(spawnStub.calledOnce).true; + expect(err.message).to.have.string(errmsg); + fin(); }) - const job = { - data: {sha: SHA}, - done: (e) => { err = e; } - }; - runTests(job); + buildRoutine(job); + }); + + it('should open and close log', fin => { + const logSpy = { + close: sandbox.stub(), + on: () => {} + } + sandbox.stub(fs, 'createWriteStream').returns(logSpy); + sandbox.stub(fs, 'mkdir'); + logSpy.close.callsFake(fin); + spawnStub.callsFake(() => { + setImmediate(() => { execEvent.emit('exit', 0, null); }); + setImmediate(() => { execEvent.emit('close', 0, null); }); + return execEvent; + }); + buildRoutine(job); + }); + + it('expect loads test record', fin => { + queue.process(buildRoutine); + queue.on('error', _ => {}); + function validate (err, job) { + expect(err).undefined; + expect('process' in job.data).false; + expect(job.data.status).eq('failure'); + expect(job.data.coverage).approximately(22.1969, 0.001); + fin(); + } + sandbox.stub(queue._events, 'finish').value([validate]); + spawnStub.callsFake(() => { + setImmediate(() => { execEvent.emit('exit', 0, null); }); + setImmediate(() => { execEvent.emit('close', 0, null); }); + return execEvent; + }); + queue.add({sha: SHA}) }); - afterEach(function () { + afterEach(function (done) { queue.pile = []; + delete queue.process; sandbox.verifyAndRestore(); + const logDir = path.join(config.dataPath, 'reports'); + 
fs.rmdir(logDir, {recursive: true}, err => { + if (err) throw err; + done() + }); }); + }); @@ -839,6 +912,9 @@ describe('srv github/', () => { }); }); + /** + * This is already covered by the setAccessToken test... + */ it('expect token set', (done) => { // Although the blob signature won't match, we can at least test that setAccessToken was called request(srv) @@ -855,6 +931,9 @@ describe('srv github/', () => { }); }); + afterEach(function () { + nock.cleanAll() + }); after(function () { clock.restore(); @@ -897,10 +976,10 @@ describe('queue finish callback', () => { expect(body.context).eq(data['context']); expect(body.target_url).empty; // URL empty on errors done(); - return queue.pile.length === 0 + return queue.pile.length === 0; }; scope.post(uri, requestBodyMatcher).reply(201); - queue.add(data) // Create new job to process + queue.add(data); // Create new job to process }); after(function() { From 6703f65134c2a2d4823355a1dde262ff1bd31535 Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Mon, 15 Mar 2021 16:04:15 +0200 Subject: [PATCH 08/50] Live update of log --- package.json | 1 + public/format.js | 67 ++++++++++++++++++++++++++++++++++ public/highlight.css | 28 +++++++++++++++ public/log.html | 23 ++++++++++++ public/style.css | 86 ++++++++++++++++++++++++++++++++++++++++++++ serve.js | 66 ++++++++++++++++++++++++++-------- 6 files changed, 257 insertions(+), 14 deletions(-) create mode 100644 public/format.js create mode 100644 public/highlight.css create mode 100644 public/log.html create mode 100644 public/style.css diff --git a/package.json b/package.json index 85edd49..48bf9d3 100644 --- a/package.json +++ b/package.json @@ -24,6 +24,7 @@ "@octokit/request": "^5.4.9", "debug": "^4.3.1", "dotenv": "^8.2.0", + "escape-html": "^1.0.3", "express": "^4.17.1", "github-webhook-handler": "^1.0.0", "localtunnel": "^2.0.1", diff --git a/public/format.js b/public/format.js new file mode 100644 index 0000000..0a76800 --- /dev/null +++ b/public/format.js @@ -0,0 +1,67 @@ +/** + * A map of class ids and the regular expressions that capture the text to style + */ +const regExps = { + errorStack: /'^Traceback.*?(?=\n\S)'/gms, // Error stack + error: /^\w*Error.*?(?=\n\S)/gms, // Error statement + warning: /Warning:.*?(?=\n\S)/gms, // Warning + logInfo: /\[.*INFO.*\r?\n(?:^\s+.*\r?\n)*/gm, // log.info + logWarn: /\[.*WARNING.*\r?\n(?:^\s+.*\r?\n)*/gm, // log.warning + logError: /\[.*ERROR.*\r?\n(?:^\s+.*\r?\n)*/gm, // log.error + logCritical: /\[.*CRITICAL.*\r?\n(?:^\s+.*\r?\n)*/gm // log.critical +}; +const cursor = ''; + +/** + * Given some text and a class name, return the text wrapped in a span of that class. + */ +function toSpan(text, className) { + return '' + text + ''; +} + +/** + * Fetch the raw log text from remote. + */ +async function updateLog() { + const contentDiv = document.querySelector('pre'); + const queryString = window.location.search; + const urlParams = new URLSearchParams(queryString); + const id = urlParams.get('id') + if (!id) { + contentDiv.innerHTML = 'ERROR: Log not found'; + return; + } + // const url = 'http://localhost:8080/log'; + const url = '/logs/raw/' + id; + // If the console is empty, add some loading text + if (!contentDiv.innerHTML) { + contentDiv.innerHTML = 'Loading log....' 
+ cursor; + } + + // Fetch the remote log text + console.debug('Reloading log'); + let log = await (await fetch(url)).text(); + + // Apply the regex for styling/highlighting the text + log = log.replace(/\x1b?\[0m/gm, ''); // Remove escape chars + for (let style in regExps) { + log = log.replace(regExps[style], x => toSpan(x, style)); + } + + // If not static, add a little blinking cursor to indicate activity + if (urlParams.has('autoupdate')) { + log += cursor; + } + + // Check if you're at the bottom + const elem = document.getElementById('console'); + const atBottom = elem.scrollHeight - elem.scrollTop === elem.clientHeight; + + // Update console text + contentDiv.innerHTML = log; + + // If you were at the bottom, update scroll position + if (atBottom) { + elem.scrollTop = elem.scrollHeight; + } +} diff --git a/public/highlight.css b/public/highlight.css new file mode 100644 index 0000000..a17ec87 --- /dev/null +++ b/public/highlight.css @@ -0,0 +1,28 @@ +.logInfo { + color: cyan; +} + +.logWarn { + color: orange; + text-shadow: 0 0 5px; +} + +.logError { + color: red; + text-shadow: 0 0 5px; +} + +.logCritical { + color: purple; + text-shadow: 0 0 5px; +} + +.error { + color: red; + font-weight: bold; + text-shadow: 0 0 5px; +} + +.warning { + color: orange; +} diff --git a/public/log.html b/public/log.html new file mode 100644 index 0000000..ae272a0 --- /dev/null +++ b/public/log.html @@ -0,0 +1,23 @@ + + + + + Title + + + + + + +
+ + diff --git a/public/style.css b/public/style.css new file mode 100644 index 0000000..f173b41 --- /dev/null +++ b/public/style.css @@ -0,0 +1,86 @@ +#console { + margin: auto; + width: 80%; + display: flex; + border: 5px solid beige; + + background-color: black; + height: 90vh; + overflow: auto; + padding: 2rem; + color: white; + font: .8rem Inconsolata, monospace; + text-shadow: 0 0 5px #C8C8C8; +} + +body::after { + content: ""; + position: fixed; + top: 0; + left: 0; + width: 100vw; + height: 100vh; + background: repeating-linear-gradient( + 0deg, + rgba(0, 0, 0, 0.15), + rgba(0, 0, 0, 0.15) 1px, + transparent 1px, + transparent 2px + ); + pointer-events: none; +} + +::selection { + background: #ffffff; + color: #000000; + text-shadow: none; +} + +.blinking-cursor { + color: #2E3D48; + text-shadow: 0 0 2px #C8C8C8; + -webkit-animation: 1s blink step-end infinite; + -moz-animation: 1s blink step-end infinite; + -o-animation: 1s blink step-end infinite; + animation: 1s blink step-end infinite; +} + +@keyframes blink { + from, to { + color: transparent; + } + 50% { + color: black; + text-shadow: none; + } +} + +@-moz-keyframes blink { + from, to { + color: transparent; + } + 50% { + color: black; + text-shadow: none; + } +} + +@-webkit-keyframes blink { + from, to { + color: transparent; + } + 50% { + color: black; + text-shadow: none; + } +} + +@-o-keyframes blink { + from, to { + color: transparent; + } + 50% { + color: black; + text-shadow: none; + } +} diff --git a/serve.js b/serve.js index a1bb27f..e0502e4 100644 --- a/serve.js +++ b/serve.js @@ -10,6 +10,7 @@ const express = require('express'); const srv = express(); const app = require("@octokit/auth-app"); const { request } = require('@octokit/request'); +const escapeHtml = require('escape-html'); const config = require('./config/config').settings; const queue = require('./lib').queue; // shared Queue from lib @@ -27,6 +28,8 @@ const secret = process.env['GITHUB_WEBHOOK_SECRET']; const supportedEvents = ['push', 'pull_request']; // events the ci can handle const maxN = 140; // The maximum n chars of the status description const ENDPOINT = 'logs'; // The URL endpoint for fetching status check details +// An optional static directory for serving css files +const STATIC = './public'; // Check all config events are supported const events = Object.keys(config.events); @@ -40,6 +43,7 @@ if (events.some(evt => { return !supportedEvents.includes(evt); })) { const createHandler = require('github-webhook-handler'); const handler = createHandler({ path: '/github', secret: secret, events: supportedEvents}); + /** * Fetch and assign the installation access token. Should be called each time a POST is made to * our app's endpoint. @@ -161,6 +165,11 @@ srv.get(`/coverage/:id`, (req, res) => { */ srv.use(`/${ENDPOINT}/coverage`, express.static(path.join(config.dataPath, 'reports'))) +/** + * Serve the css and javascript for the log Webpage. + */ +srv.use(`/static`, express.static(STATIC)) + /** * Serve the test records for requested commit id. Returns JSON data for the commit. */ @@ -202,21 +211,15 @@ srv.get(`/${ENDPOINT}/:id`, function (req, res) { ); fetchCommit(req.params.id, !isSHA, req.query.module) .then(id => { - let filename = log_only? `test_output.log` : `std_output-${lib.shortID(id)}.log`; - let logFile = path.join(config.dataPath, 'reports', id, filename); - fs.readFile(logFile, 'utf8', (err, data) => { - if (err) { - log('%s', err.message); - res.statusCode = 404; - res.send(`Record for ${isSHA? 
'commit' : 'branch'} ${id} not found`); - } else { - res.statusCode = 200; - // Wrap in HTML tags so that the formatting is a little nicer. - let preText = '
';
-               let postText = '
'; - res.send(preText + data + postText); + let url = lib.addParam('/static/log.html', `id=${id}`); + if (log_only) { url = lib.addParam(url, 'type=log'); } + for (let job of queue.pile) { + if (job.data.sha === id) { + url = lib.addParam(url, 'autoupdate='); + break; } - }); + } + res.redirect(301, url); }) .catch(err => { log('%s', err.message); @@ -226,6 +229,41 @@ srv.get(`/${ENDPOINT}/:id`, function (req, res) { }); +/** + * Serve the test results for requested commit id. Returns the raw text log. + */ +srv.get(`/${ENDPOINT}/raw/:id`, function (req, res) { + let id = lib.shortID(req.params.id); + let log_only = (req.query.type || '').startsWith('log') + let filename = log_only? `test_output.log` : `std_output-${lib.shortID(id)}.log`; + let logFile = path.join(config.dataPath, 'reports', id, filename); + let jobStatus = 'unknown'; + for (let job of queue.pile) { + if (job.data.sha === req.params.id) { + jobStatus = running === true? 'running' : 'queued'; + break; + } + } + + fs.readFile(logFile, 'utf8', (err, data) => { + if (err) { + // Check if queued... + if (jobStatus === 'queued') { + data = 'Job waiting to start...'; + } else { + log('%s', err.message); + res.statusCode = 404; + res.send(`Record for commit ${id} not found`); + return; + } + } + res.statusCode = 200; + res.header('job_status', jobStatus); + res.send(escapeHtml(data)); + }); +}); + + ///////////////////// SHIELDS API EVENTS ///////////////////// /** From 16101d9657da67c235965897d43cfbb4735dc6d9 Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Mon, 15 Mar 2021 17:16:06 +0200 Subject: [PATCH 09/50] Send file instead of static --- public/format.js | 2 +- serve.js | 18 +++++++++--------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/public/format.js b/public/format.js index 0a76800..5fb7bff 100644 --- a/public/format.js +++ b/public/format.js @@ -26,7 +26,7 @@ async function updateLog() { const contentDiv = document.querySelector('pre'); const queryString = window.location.search; const urlParams = new URLSearchParams(queryString); - const id = urlParams.get('id') + const id = window.location.pathname.split('/').pop(); if (!id) { contentDiv.innerHTML = 'ERROR: Log not found'; return; diff --git a/serve.js b/serve.js index e0502e4..3231265 100644 --- a/serve.js +++ b/serve.js @@ -211,15 +211,15 @@ srv.get(`/${ENDPOINT}/:id`, function (req, res) { ); fetchCommit(req.params.id, !isSHA, req.query.module) .then(id => { - let url = lib.addParam('/static/log.html', `id=${id}`); - if (log_only) { url = lib.addParam(url, 'type=log'); } - for (let job of queue.pile) { - if (job.data.sha === id) { - url = lib.addParam(url, 'autoupdate='); - break; - } - } - res.redirect(301, url); + // let url = lib.addParam('/static/log.html', `id=${id}`); + // if (log_only) { url = lib.addParam(url, 'type=log'); } + // for (let job of queue.pile) { + // if (job.data.sha === id) { + // url = lib.addParam(url, 'autoupdate='); + // break; + // } + // } + res.sendFile(path.join(STATIC + 'log.html')); }) .catch(err => { log('%s', err.message); From a693a36796086cff4c3e7fb0c21f21b39bae72b6 Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Mon, 15 Mar 2021 17:31:38 +0200 Subject: [PATCH 10/50] New endpoint --- serve.js | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/serve.js b/serve.js index 3231265..98d714a 100644 --- a/serve.js +++ b/serve.js @@ -219,7 +219,7 @@ srv.get(`/${ENDPOINT}/:id`, function (req, res) { // break; // } // } - res.sendFile(path.join(STATIC + 'log.html')); + res.redirect(301, 
'/log/' + id); }) .catch(err => { log('%s', err.message); @@ -229,6 +229,17 @@ srv.get(`/${ENDPOINT}/:id`, function (req, res) { }); +srv.get(`/log/:id`, function (req, res) { + try { + res.sendFile(path.join(STATIC + 'log.html')); + } catch (err) { + log('%s', err.message); + res.statusCode = 404; + res.send(`Record for commit ${req.params.id} not found`); + } +}); + + /** * Serve the test results for requested commit id. Returns the raw text log. */ From 5cb88cfe0ee44ac22155c1e108dad1685ac8911a Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Mon, 15 Mar 2021 17:34:38 +0200 Subject: [PATCH 11/50] links relative to endpoint --- public/log.html | 6 +++--- serve.js | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/public/log.html b/public/log.html index ae272a0..504a8f2 100644 --- a/public/log.html +++ b/public/log.html @@ -3,9 +3,9 @@ Title - - - + + + -
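
NB: The changes above split log viewing into two parts: /log/:id serves the static log.html shell, while the client-side format.js script takes the commit SHA from the URL path and fetches /logs/raw/:id for the plain-text log (later patches add periodic refresh and an X-CI-JobStatus response header so the page knows when the job has finished). A condensed browser-side sketch of that flow follows; the refreshLog name and the 2-second interval are illustrative only, and the real format.js additionally escapes and highlights the text before rendering it.

    // Illustrative sketch only, not the actual format.js implementation.
    const sha = window.location.pathname.split('/').pop();   // page served at /log/<sha>
    async function refreshLog() {
        const response = await fetch('/logs/raw/' + sha);    // raw text log endpoint
        if (!response.ok) return;                             // keep the previous contents on error
        document.querySelector('pre').innerText = await response.text();
    }
    refreshLog();
    setInterval(refreshLog, 2000);                            // poll while the job is running
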
diff --git a/serve.js b/serve.js index afdad48..cdc7c79 100644 --- a/serve.js +++ b/serve.js @@ -246,9 +246,8 @@ srv.get(`/log/:id`, function (req, res) { srv.get(`/${ENDPOINT}/raw/:id`, function (req, res) { let id = lib.shortID(req.params.id); let log_only = (req.query.type || '').startsWith('log') - let filename = log_only? `test_output.log` : `std_output-${lib.shortID(id)}.log`; - let logFile = path.join(config.dataPath, 'reports', req.params.id, filename); - let jobStatus = 'unknown'; + let filename = log_only? `test_output.log` : `std_output-${id}.log`; + let jobStatus = 'finished'; for (let job of queue.pile) { if (job.data.sha === req.params.id) { jobStatus = running === true? 'running' : 'queued'; @@ -256,22 +255,28 @@ srv.get(`/${ENDPOINT}/raw/:id`, function (req, res) { } } - fs.readFile(logFile, 'utf8', (err, data) => { + if (jobStatus === 'queued') { + res.statusCode = 200; + res.header('X-CI-JobStatus', jobStatus); + res.send('Job waiting to start...'); + return; + } + + const options = { + root: path.join(config.dataPath, 'reports', req.params.id), + headers: { + 'X-CI-JobStatus': jobStatus + } + }; + + res.sendFile(filename, options, function (err) { if (err) { - // Check if queued... - if (jobStatus === 'queued') { - data = 'Job waiting to start...'; - } else { - log('%s', err.message); - res.statusCode = 404; - res.send(`Record for commit ${id} not found`); - return; - } + console.error('Failed to send log: ', err); + } else { + log('Sent:', filename); } - res.statusCode = 200; - res.header('job_status', jobStatus); - res.send(escapeHtml(data)); }); + }); From 2424503ac6a62963c313267a43e2913915689891 Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Mon, 15 Mar 2021 21:06:39 +0200 Subject: [PATCH 15/50] Correct get headers --- public/format.js | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/public/format.js b/public/format.js index f697ce7..a8a0b56 100644 --- a/public/format.js +++ b/public/format.js @@ -60,7 +60,7 @@ async function updateLog() { console.error('Failed to return the log file'); return; } - lastModified = response.header('Last-Modified'); + lastModified = response.headers.get('Last-Modified'); let log = await (response).text(); log = escapeHTML(log); @@ -86,13 +86,15 @@ async function updateLog() { } // Call recursively - console.debug(response.header('X-CI-JobStatus')); + const jobStatus = response.headers.get('X-CI-JobStatus'); + console.debug(jobStatus); + if (!timer && urlParams.has('autoupdate')) { console.debug('Setting reload timer'); const timeout = urlParams.get('autoupdate') || 1000; // default 1 sec const minTimeout = 500; timer = setInterval(updateLog, Math.max(timeout, minTimeout)); - } else if (response.ok && response.header('X-CI-JobStatus') === 'finished' && timer) { + } else if (response.ok && jobStatus === 'finished' && timer) { console.debug('Clearing reload timer'); clearInterval(timer); timer = null; From 152c8efc98b5780e85886fee4488ee6ed6c4e85a Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Mon, 15 Mar 2021 21:11:35 +0200 Subject: [PATCH 16/50] job check typo --- serve.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/serve.js b/serve.js index cdc7c79..d3abbaf 100644 --- a/serve.js +++ b/serve.js @@ -250,7 +250,7 @@ srv.get(`/${ENDPOINT}/raw/:id`, function (req, res) { let jobStatus = 'finished'; for (let job of queue.pile) { if (job.data.sha === req.params.id) { - jobStatus = running === true? 'running' : 'queued'; + jobStatus = job.running === true? 
'running' : 'queued'; break; } } From 6ac73d231a9318876c640fe520bd0e7552fa896c Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Mon, 15 Mar 2021 21:51:44 +0200 Subject: [PATCH 17/50] Update title --- public/format.js | 37 +++++++++++++++++++++++++++---------- public/log.html | 3 ++- public/style.css | 4 ++++ serve.js | 4 +++- 4 files changed, 36 insertions(+), 12 deletions(-) diff --git a/public/format.js b/public/format.js index a8a0b56..0aa9c0b 100644 --- a/public/format.js +++ b/public/format.js @@ -13,7 +13,9 @@ const regExps = { const cursor = ''; let timer = null; let lastModified = null; - +const id = window.location.pathname.split('/').pop(); +const heading = 'Job log for commit ' + shortID(id); +document.querySelector('pre').innerText = heading; /** * Given some text and a class name, return the text wrapped in a span of that class. @@ -26,6 +28,19 @@ function escapeHTML(str){ return new Option(str).innerHTML; } +/** + * Return a shortened version of an int or string id + * @param {any} v - ID to shorten. + * @param {int} len - Maximum number of chars. + * @returns {String} v as a short string. + */ +function shortID(v, len=7) { + if (Array.isArray(v)) { return v.map(v => shortID(v, len)); } + if (Number.isInteger(v)) { v = v.toString(); } + if (typeof v === 'string' || v instanceof String) { v = v.substr(0, len); } + return v; // If not string, array or number, leave unchanged +} + /** * Fetch the raw log text from remote. */ @@ -33,11 +48,6 @@ async function updateLog() { const contentDiv = document.querySelector('pre'); const queryString = window.location.search; const urlParams = new URLSearchParams(queryString); - const id = window.location.pathname.split('/').pop(); - if (!id) { - contentDiv.innerHTML = 'ERROR: Log not found'; - return; - } const url = '/logs/raw/' + id; // If the console is empty, add some loading text @@ -58,6 +68,10 @@ async function updateLog() { return; } else if (response.status !== 200) { console.error('Failed to return the log file'); + // If never loaded, change console text + if (!lastModified) { + contentDiv.innerHTML = toSpan('ERROR: Failed to load log', 'error'); + } return; } lastModified = response.headers.get('Last-Modified'); @@ -71,7 +85,7 @@ async function updateLog() { } // If not static, add a little blinking cursor to indicate activity - if (urlParams.has('autoupdate')) { log += cursor; } + if (urlParams.has('refresh')) { log += cursor; } // Update console text contentDiv.innerHTML = log; @@ -85,13 +99,16 @@ async function updateLog() { elem.scrollTop = elem.scrollHeight; } - // Call recursively + // Set title const jobStatus = response.headers.get('X-CI-JobStatus'); + const header = document.querySelector('h1') + header.innerText = `${heading} | ${jobStatus.toUpperCase()}`; + document.title = `Job ${jobStatus} for commit ${shortID(id)}`; console.debug(jobStatus); - if (!timer && urlParams.has('autoupdate')) { + if (!timer && urlParams.has('refresh')) { console.debug('Setting reload timer'); - const timeout = urlParams.get('autoupdate') || 1000; // default 1 sec + const timeout = urlParams.get('refresh') || 1000; // default 1 sec const minTimeout = 500; timer = setInterval(updateLog, Math.max(timeout, minTimeout)); } else if (response.ok && jobStatus === 'finished' && timer) { diff --git a/public/log.html b/public/log.html index ce0e0ea..fa09321 100644 --- a/public/log.html +++ b/public/log.html @@ -2,12 +2,13 @@ - Title + CI Log +

diff --git a/public/style.css b/public/style.css index f173b41..3c17c1e 100644 --- a/public/style.css +++ b/public/style.css @@ -13,6 +13,10 @@ text-shadow: 0 0 5px #C8C8C8; } +h1 { + text-align:center; +} + body::after { content: ""; position: fixed; diff --git a/serve.js b/serve.js index d3abbaf..c6ba912 100644 --- a/serve.js +++ b/serve.js @@ -603,9 +603,11 @@ async function eventCallback (event) { let data = Object.assign({}, job_template); data.context = `${check}/${process.env['USERDOMAIN'] || process.env['NAME']}` data.routine = lib.context2routine(check); + let targetURL = `${process.env['WEBHOOK_PROXY_URL']}/log/${data.sha}?refresh=1000`; switch (check) { case 'coverage': data.description = 'Checking coverage'; + targetURL = ''; // Must wait until end for coverage break; case 'continuous-integration': data.description = 'Tests running'; @@ -625,7 +627,7 @@ async function eventCallback (event) { * NB: If the tests and env prep are too quick our outcome may be updated before the pending * status. */ - updateStatus(data) + updateStatus(data, targetURL) .then(() => console.log(`Updated status to "pending" for ${data.context}`)) .catch(err => { console.log(`Failed to update status to "pending" for ${data.context}`); From c11122f8dc4b212b0c053affe3b808861a58ea67 Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Mon, 15 Mar 2021 21:58:50 +0200 Subject: [PATCH 18/50] Heading after load --- public/format.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/public/format.js b/public/format.js index 0aa9c0b..a60429f 100644 --- a/public/format.js +++ b/public/format.js @@ -15,7 +15,9 @@ let timer = null; let lastModified = null; const id = window.location.pathname.split('/').pop(); const heading = 'Job log for commit ' + shortID(id); -document.querySelector('pre').innerText = heading; +document.addEventListener('DOMContentLoaded', function() { + document.querySelector('h1').innerText = heading; +}, false); /** * Given some text and a class name, return the text wrapped in a span of that class. 
@@ -101,7 +103,7 @@ async function updateLog() { // Set title const jobStatus = response.headers.get('X-CI-JobStatus'); - const header = document.querySelector('h1') + const header = document.querySelector('h1'); header.innerText = `${heading} | ${jobStatus.toUpperCase()}`; document.title = `Job ${jobStatus} for commit ${shortID(id)}`; console.debug(jobStatus); From a4780e6c64695bdb129aa9d336aa021010138aae Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Mon, 15 Mar 2021 22:23:02 +0200 Subject: [PATCH 19/50] Fixed sticky scroll --- public/format.js | 18 ++++++++++-------- public/style.css | 5 ++--- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/public/format.js b/public/format.js index a60429f..da30224 100644 --- a/public/format.js +++ b/public/format.js @@ -81,32 +81,34 @@ async function updateLog() { log = escapeHTML(log); // Apply the regex for styling/highlighting the text - log = log.replace(/\x1b?\[0m/gm, ''); // Remove escape chars - for (let style in regExps) { - log = log.replace(regExps[style], x => toSpan(x, style)); + if (urlParams.get('formatting') !== 'off') { + log = log.replace(/\x1b?\[0m/gm, ''); // Remove escape chars + for (let style in regExps) { + log = log.replace(regExps[style], x => toSpan(x, style)); + } } // If not static, add a little blinking cursor to indicate activity if (urlParams.has('refresh')) { log += cursor; } - // Update console text - contentDiv.innerHTML = log; - // Check if you're at the bottom const elem = document.getElementById('console'); const atBottom = elem.scrollHeight - elem.scrollTop === elem.clientHeight; + // Update console text + contentDiv.innerHTML = log; + // If you were at the bottom, update scroll position if (atBottom) { + console.debug('Setting scroll height') elem.scrollTop = elem.scrollHeight; } // Set title const jobStatus = response.headers.get('X-CI-JobStatus'); const header = document.querySelector('h1'); - header.innerText = `${heading} | ${jobStatus.toUpperCase()}`; + header.innerText = `${heading} | ${jobStatus.toUpperCase()}`; document.title = `Job ${jobStatus} for commit ${shortID(id)}`; - console.debug(jobStatus); if (!timer && urlParams.has('refresh')) { console.debug('Setting reload timer'); diff --git a/public/style.css b/public/style.css index 3c17c1e..7ad93b8 100644 --- a/public/style.css +++ b/public/style.css @@ -3,9 +3,8 @@ width: 80%; display: flex; border: 5px solid beige; - background-color: black; - height: 90vh; + height: 80vh; overflow: auto; padding: 2rem; color: white; @@ -42,7 +41,7 @@ body::after { .blinking-cursor { color: #2E3D48; - text-shadow: 0 0 2px #C8C8C8; + text-shadow: 0 0 1px #C8C8C8; -webkit-animation: 1s blink step-end infinite; -moz-animation: 1s blink step-end infinite; -o-animation: 1s blink step-end infinite; From ff67f089e62c74360d9546b2a49c0e7e48082fc4 Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Tue, 16 Mar 2021 13:16:45 +0200 Subject: [PATCH 20/50] Live refresh by default --- public/format.js | 20 +++++++++++--------- public/log.html | 10 ++++++++++ public/style.css | 19 +++++++++++++++++++ serve.js | 30 +++++++++++++----------------- 4 files changed, 53 insertions(+), 26 deletions(-) diff --git a/public/format.js b/public/format.js index da30224..9a51c39 100644 --- a/public/format.js +++ b/public/format.js @@ -2,9 +2,9 @@ * A map of class ids and the regular expressions that capture the text to style */ const regExps = { - errorStack: /'^Traceback.*?(?=\n\S)'/gms, // Error stack - error: /^\w*Error.*?(?=\n\S)/gms, // Error statement - warning: 
/Warning:.*?(?=\n\S)/gms, // Warning + errorStack: /^Traceback.*\r?\n(?:^\s+.*\r?\n)+/gm, // Error stack + error: /^\w*(Error|Exception).*\r?\n/gm, // Error statement + warning: /\w*Warning:.*\r?\n(?:^\s+.*\r?\n)/gm, // Warning logInfo: /\[.*INFO.*\r?\n(?:^\s+.*\r?\n)*/gm, // log.info logWarn: /\[.*WARNING.*\r?\n(?:^\s+.*\r?\n)*/gm, // log.warning logError: /\[.*ERROR.*\r?\n(?:^\s+.*\r?\n)*/gm, // log.error @@ -77,6 +77,7 @@ async function updateLog() { return; } lastModified = response.headers.get('Last-Modified'); + const jobStatus = response.headers.get('X-CI-JobStatus'); let log = await (response).text(); log = escapeHTML(log); @@ -89,7 +90,8 @@ async function updateLog() { } // If not static, add a little blinking cursor to indicate activity - if (urlParams.has('refresh')) { log += cursor; } + const isRunning = ['queued', 'running'].includes(jobStatus); + if (isRunning) { log += cursor; } // Check if you're at the bottom const elem = document.getElementById('console'); @@ -104,16 +106,16 @@ async function updateLog() { elem.scrollTop = elem.scrollHeight; } - // Set title - const jobStatus = response.headers.get('X-CI-JobStatus'); + // Set title, etc. const header = document.querySelector('h1'); header.innerText = `${heading} | ${jobStatus.toUpperCase()}`; document.title = `Job ${jobStatus} for commit ${shortID(id)}`; + document.getElementById('date').innerText = new Date(lastModified).toLocaleString(); - if (!timer && urlParams.has('refresh')) { + if (!timer && (urlParams.has('refresh') || isRunning)) { console.debug('Setting reload timer'); - const timeout = urlParams.get('refresh') || 1000; // default 1 sec - const minTimeout = 500; + const timeout = (urlParams.get('refresh') || 2) * 1000; // default 2 sec + const minTimeout = 500; // ms timer = setInterval(updateLog, Math.max(timeout, minTimeout)); } else if (response.ok && jobStatus === 'finished' && timer) { console.debug('Clearing reload timer'); diff --git a/public/log.html b/public/log.html index fa09321..6692f66 100644 --- a/public/log.html +++ b/public/log.html @@ -9,6 +9,16 @@

+

+ diff --git a/public/style.css b/public/style.css index 7ad93b8..a8245d9 100644 --- a/public/style.css +++ b/public/style.css @@ -10,12 +10,31 @@ color: white; font: .8rem Inconsolata, monospace; text-shadow: 0 0 5px #C8C8C8; + scrollbar-width: thin; +} + +p.date { + position : absolute; + top : 0; + right : 5px; + color: gray; + font: .8rem Inconsolata, monospace; } h1 { text-align:center; } +footer { + text-align: center; + padding: 1px; + background-color: bisque; +} + +a { + color: black; +} + body::after { content: ""; position: fixed; diff --git a/serve.js b/serve.js index c6ba912..50c1cd1 100644 --- a/serve.js +++ b/serve.js @@ -197,9 +197,8 @@ srv.get(`/${ENDPOINT}/records/:id`, function (req, res) { }); /** - * Serve the test results for requested commit id. This will be the result of a user clicking on - * the 'details' link next to the continuous integration check. The result should be an HTML - * formatted copy of the stdout for the job's process. + * Serve the test results for requested commit id. This endpoint parses and validates the id. + * If it corresponds to a valid commit SHA, the user is redirected to the log endpoint. */ srv.get(`/${ENDPOINT}/:id`, function (req, res) { let id = lib.shortID(req.params.id); @@ -210,17 +209,7 @@ srv.get(`/${ENDPOINT}/:id`, function (req, res) { (isSHA? `commit ${id}` : `branch ${req.params.id}`) ); fetchCommit(req.params.id, !isSHA, req.query.module) - .then(id => { - // let url = lib.addParam('/static/log.html', `id=${id}`); - // if (log_only) { url = lib.addParam(url, 'type=log'); } - // for (let job of queue.pile) { - // if (job.data.sha === id) { - // url = lib.addParam(url, 'autoupdate='); - // break; - // } - // } - res.redirect(301, '/log/' + id); - }) + .then(id => res.redirect(301, '/log/' + id)) .catch(err => { log('%s', err.message); res.statusCode = 404; @@ -229,8 +218,13 @@ srv.get(`/${ENDPOINT}/:id`, function (req, res) { }); +/** + * Serve the test results for requested commit id. This will be the result of a user clicking on + * the 'details' link next to the continuous integration check. The result should be an HTML + * formatted copy of the stdout for the job's process. + */ srv.get(`/log/:id`, function (req, res) { - try { + try { // Send static HTML page template res.sendFile(path.join(__dirname, STATIC, 'log.html')); } catch (err) { log('%s', err.message); @@ -241,7 +235,9 @@ srv.get(`/log/:id`, function (req, res) { /** - * Serve the test results for requested commit id. Returns the raw text log. + * Serve the log file for requested commit id. This endpoint is fetched by the format.js script + * client side. Returns the raw text log along with a header to indicate whether the job is + * active. If the log hasn't changed since the last request, a 304 is returned instead. 
*/ srv.get(`/${ENDPOINT}/raw/:id`, function (req, res) { let id = lib.shortID(req.params.id); @@ -603,7 +599,7 @@ async function eventCallback (event) { let data = Object.assign({}, job_template); data.context = `${check}/${process.env['USERDOMAIN'] || process.env['NAME']}` data.routine = lib.context2routine(check); - let targetURL = `${process.env['WEBHOOK_PROXY_URL']}/log/${data.sha}?refresh=1000`; + let targetURL = `${process.env['WEBHOOK_PROXY_URL']}/log/${data.sha}?refresh=1`; switch (check) { case 'coverage': data.description = 'Checking coverage'; From 41dd3839bec9bfe10aeff362be6a888fbab4516e Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Tue, 16 Mar 2021 14:46:15 +0200 Subject: [PATCH 21/50] Fixed date position --- public/highlight.css | 6 +----- public/style.css | 13 +++++++------ 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/public/highlight.css b/public/highlight.css index a17ec87..2172e58 100644 --- a/public/highlight.css +++ b/public/highlight.css @@ -2,7 +2,7 @@ color: cyan; } -.logWarn { +.logWarn .warning { color: orange; text-shadow: 0 0 5px; } @@ -22,7 +22,3 @@ font-weight: bold; text-shadow: 0 0 5px; } - -.warning { - color: orange; -} diff --git a/public/style.css b/public/style.css index a8245d9..d7fcdc4 100644 --- a/public/style.css +++ b/public/style.css @@ -1,4 +1,5 @@ #console { + position: relative; margin: auto; width: 80%; display: flex; @@ -6,19 +7,19 @@ background-color: black; height: 80vh; overflow: auto; - padding: 2rem; + padding: 1rem; color: white; font: .8rem Inconsolata, monospace; text-shadow: 0 0 5px #C8C8C8; scrollbar-width: thin; } -p.date { +p#date { position : absolute; - top : 0; - right : 5px; - color: gray; - font: .8rem Inconsolata, monospace; + top : -5px; + right : 10px; + color: grey; + text-shadow: none; } h1 { From 972ec28fb55bfc6da8f855720345008c96a7b682 Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Tue, 16 Mar 2021 19:11:03 +0200 Subject: [PATCH 22/50] Records endpoint returns queued job data; Issue #62 --- package.json | 1 - queue.js | 3 ++ serve.js | 43 +++++++++++++++++++++------ test/serve.test.js | 72 ++++++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 109 insertions(+), 10 deletions(-) diff --git a/package.json b/package.json index 48bf9d3..85edd49 100644 --- a/package.json +++ b/package.json @@ -24,7 +24,6 @@ "@octokit/request": "^5.4.9", "debug": "^4.3.1", "dotenv": "^8.2.0", - "escape-html": "^1.0.3", "express": "^4.17.1", "github-webhook-handler": "^1.0.0", "localtunnel": "^2.0.1", diff --git a/queue.js b/queue.js index 2bf66c4..8b3eaca 100644 --- a/queue.js +++ b/queue.js @@ -102,6 +102,8 @@ class Job extends EventEmitter { id; data; running; + created; + _child; /** * Create a job object with associated data. * @param {number} id - Job ID (unique in current Queue pile). @@ -116,6 +118,7 @@ class Job extends EventEmitter { this.id = id; this.data = data; this.running = false; + this.created = new Date(); } /** diff --git a/serve.js b/serve.js index 50c1cd1..0f11285 100644 --- a/serve.js +++ b/serve.js @@ -10,7 +10,6 @@ const express = require('express'); const srv = express(); const app = require("@octokit/auth-app"); const { request } = require('@octokit/request'); -const escapeHtml = require('escape-html'); const config = require('./config/config').settings; const queue = require('./lib').queue; // shared Queue from lib @@ -172,6 +171,7 @@ srv.use(`/static`, express.static(STATIC)) /** * Serve the test records for requested commit id. Returns JSON data for the commit. 
+ * If no record exists and a job is queued the job data is sent, otherwise a 404. */ srv.get(`/${ENDPOINT}/records/:id`, function (req, res) { let id = lib.shortID(req.params.id); @@ -181,18 +181,32 @@ srv.get(`/${ENDPOINT}/records/:id`, function (req, res) { .then(id => { log('Commit ID found: %s', id); let record = lib.loadTestRecords(id); - if (record) { + if (record.length !== 0) { res.setHeader('Content-Type', 'application/json'); res.end(JSON.stringify(record)); } else { + // Check if on pile + for (let job of queue.pile) { + if (job.data.sha === id) { + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify(job.data)); + return; + } + } + // Not on pile, return 404 res.statusCode = 404; - res.send(`${isSHA? 'Commit' : 'Branch'} ${id} not recognized.`); + res.send(`Record for ${isSHA? 'commit' : 'branch'} ${id} not found.`); } }) .catch(err => { - log('%s', err.message); - res.statusCode = 404; - res.send(`Record for ${isSHA? 'commit' : 'branch'} ${req.params.id} not found`); + if (err.status === 404) { + res.statusCode = 404; + res.send(`${isSHA? 'Commit' : 'Branch'} ${req.params.id} not found.`); + } else { + log('%s', err.message || err.name); + res.statusCode = 500; + res.send('Failed to read test records JSON'); + } }); }); @@ -276,6 +290,17 @@ srv.get(`/${ENDPOINT}/raw/:id`, function (req, res) { }); +/** + * Serve a list of currently cued jobs. + */ +srv.get('/jobs', function (req, res) { + const data = { total: queue.pile.length, pile: queue.pile }; + const replacer = (key, value) => { return (key[0] === '_')? undefined : value }; + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify(data, replacer)); +}); + + ///////////////////// SHIELDS API EVENTS ///////////////////// /** @@ -389,7 +414,7 @@ async function buildRoutine(job) { }; callback(proc); }); - job.data.process = child; // Assign the child process to the job + job._child = child; // Assign the child process to the job }); }); } @@ -404,7 +429,7 @@ async function buildRoutine(job) { let message; // Error message to pass to job callbacks and to save into records // The script that threw the error const file = (errored instanceof Error)? errored.path : errored.process.spawnfile; - delete job.data.process; // Remove the process from the job data + // delete job._child; // Remove the process from the job data // Check if the error is a spawn error, this is thrown when spawn itself fails, i.e. 
due to // missing shell script @@ -450,7 +475,7 @@ async function buildRoutine(job) { */ function updateJob(proc) { debug('Job routine complete'); - delete job.data.process; // Remove process from job data + // delete job._child; // Remove process from job data // Attempt to update the job data from the JSON records, throw error if this fails if (!lib.updateJobFromRecord(job)) { job.done(new Error('Failed to return test result')); diff --git a/test/serve.test.js b/test/serve.test.js index ca18da0..8d25555 100644 --- a/test/serve.test.js +++ b/test/serve.test.js @@ -567,6 +567,12 @@ describe('records endpoint', () => { before(function () { scope = nock('https://api.github.com'); + queue.process(async (_job, _done) => {}) // nop + }); + + after(function () { + queue.pile = []; + delete queue.process; }); it('expect JSON log', (done) => { @@ -617,6 +623,37 @@ describe('records endpoint', () => { }); }); + it('expect 500 on error', (done) => { + const id = SHA.replace('2', '3'); + scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${id}`) + .reply(500); + // Check JSON record returned + request(srv) + .get(`/${ENDPOINT}/records/${id}`) + .expect(500) + .end(function (err, res) { + if (err) return done(err); + expect(res.text).contains('Failed'); + done(); + }); + }); + + it('expect queued job data', (done) => { + const id = SHA.replace('2', '3'); + scope.get(`/repos/${process.env.REPO_OWNER}/${process.env.REPO_NAME}/commits/${id}`) + .reply(200, { sha: id } ); + queue.add( {sha: id, status: 'pending'} ); + // Check JSON record returned + request(srv) + .get(`/${ENDPOINT}/records/${id}`) + .expect(200) + .end(function (err, res) { + if (err) return done(err); + expect(res.text).contains('pending'); + done(); + }); + }); + it('expect works with branch and module', (done) => { const branch = 'develop'; const repo = 'foobar'; @@ -642,6 +679,41 @@ describe('records endpoint', () => { }); +/** + * This tests the jobs endpoint. This endpoint should return the jobs pile. + */ +describe('jobs endpoint', () => { + + before(function () { + queue.process(async (_job, _done) => {}) // nop + }); + + after(function () { + queue.pile = []; + delete queue.process; + }); + + it('expect queue JSON', (done) => { + queue.add({sha: SHA, status: 'pending', context: 'continuous-integration'}); + queue.add({sha: SHA, status: 'pending', context: 'coverage'}); + // Check JSON record returned + request(srv) + .get('/jobs') + .expect(200) + .expect('Content-Type', 'application/json') + .end(function (err, res) { + if (err) return done(err); + const payload = JSON.parse(res.text); + expect(payload.total).eq(2); + expect(payload.pile[0].running).true; + expect(payload.pile[1].running).false; + expect(payload.pile[0]._child).undefined; + done(); + }); + }); +}); + + /** * This tests the coverage endpoint. Directly accessing endpoint should return 403. 
*/ From f7a658eb24d600fb2d90df3ad794d7f1509e6c15 Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Tue, 16 Mar 2021 19:30:03 +0200 Subject: [PATCH 23/50] Created status when job triggered --- serve.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/serve.js b/serve.js index 0f11285..5d0ad2a 100644 --- a/serve.js +++ b/serve.js @@ -327,8 +327,9 @@ srv.get('/:badge/:repo/:id', async (req, res) => { data['sha'] = id; data['force'] = req.query.force === '' || lib.strToBool(req.query.force); console.log(`Request for ${req.params.id} ${data.context}`) - const report = lib.getBadgeData(data); // TODO If pending return 201, else 200 + const report = lib.getBadgeData(data); // Send report + res.statusCode = (report['message'] === 'pending')? 201 : 200; res.setHeader('Content-Type', 'application/json'); res.end(JSON.stringify(report));}) .catch(err => { // Specified repo or branch not found From 9a833ba3d04b3d8556e9f8b6fc619725e06af31e Mon Sep 17 00:00:00 2001 From: Miles Wells Date: Wed, 17 Mar 2021 17:58:37 +0200 Subject: [PATCH 24/50] Move date into console --- public/log.html | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/public/log.html b/public/log.html index 6692f66..f4942ef 100644 --- a/public/log.html +++ b/public/log.html @@ -9,8 +9,7 @@

-

-
+
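
NB: The /jobs endpoint added in patch 22 relies on the replacer parameter of JSON.stringify to keep private fields out of the response, i.e. any key starting with an underscore, such as the _child process handle that buildRoutine now attaches to each job. A minimal standalone illustration of that behaviour, using made-up job data:

    // Keys beginning with '_' are dropped during serialization.
    const replacer = (key, value) => (key[0] === '_') ? undefined : value;
    const job = {id: 123, running: true, created: new Date(), _child: {pid: 4321}};
    console.log(JSON.stringify({total: 1, pile: [job]}, replacer));
    // -> {"total":1,"pile":[{"id":123,"running":true,"created":"2021-03-16T..."}]}
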