Merge pull request #68 from cortex-lab/dev
v3.0.0
k1o0 authored Apr 30, 2021
2 parents 3a7efa9 + df64c8c commit 5726447
Showing 21 changed files with 3,619 additions and 2,199 deletions.
29 changes: 28 additions & 1 deletion CHANGELOG.md
@@ -1,6 +1,33 @@
# Changelog

## [Latest](https://github.com/cortex-lab/matlab-ci/commits/master) [2.2.0]
## [Latest](https://github.com/cortex-lab/matlab-ci/commits/master) [3.0.0]

## Added

- any number of tasks may be added for a job, which are then executed in series (see the sketch below)
- now serves a webpage that shows the log in real time
- added a jobs endpoint to see which jobs are on the pile
- stderr is piped to the log file
- flake8 errors are neatly captured in the GitHub status description
- param to skip checks when only ignored files changed
- param to skip draft PR event checks

## Modified

- renamed MATLAB-CI to labCI
- records endpoint can return pending jobs
- tests badge endpoint returns 'error' on errored tests instead of 'unknown'
- job waits for coverage calculation and updating of records before finishing
- on successful completion of tests, the duration is appended to the description

## [2.2.1]

## Modified

- fix error where GitHub events were incorrectly rejected
- fix bug where an incorrect log name was used when an endpoint was called with a branch name

## [2.2.0]

## Added
- nyc dependency for manual coverage of matlab-ci
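The "any number of tasks ... executed in series" entry above maps to the new `routines` setting introduced in the README and settings.json changes below. As a rough, hypothetical illustration only (not labCI's actual runner), serial execution of routine scripts with `child_process.execFile` might look like this; `runSeries` and its options are invented for the sketch:
```
const { execFile } = require('child_process');

// Hypothetical stand-in for labCI's task runner: run each routine script in
// series and stop at the first failure, so a failing task fails the job.
function runSeries(routines, options = {}) {
  return routines.reduce((chain, script) => chain.then(() =>
    new Promise((resolve, reject) => {
      execFile(script, [], { shell: options.shell || false }, (err, stdout, stderr) => {
        if (err) return reject(err);
        resolve({ stdout, stderr });  // stderr is kept so it can be piped to the log
      });
    })
  ), Promise.resolve());
}

// e.g. the routines listed in settings.json
runSeries(['./prep_env.BAT', './run_tests.BAT'], { shell: true })
  .then(() => console.log('all tasks passed'))
  .catch(err => console.error('a task failed:', err.message));
```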
19 changes: 12 additions & 7 deletions README.md
@@ -1,8 +1,8 @@
# MATLAB-ci
# LabCI
[![Build Status](https://travis-ci.com/cortex-lab/matlab-ci.svg?branch=master)](https://travis-ci.com/cortex-lab/matlab-ci)
[![Coverage](https://img.shields.io/badge/coverage-81.07-green)](https://img.shields.io/badge/coverage-72.35-yellowgreen)
[![Coverage](https://img.shields.io/badge/coverage-92.13-brightgreen)](https://img.shields.io/badge/coverage-72.35-yellowgreen)

A small set of modules written in Node.js for running automated tests of MATLAB code in response to GitHub events. Also submits code coverage to the Coveralls API.
A small set of modules written in Node.js for running automated tests of MATLAB and Python code in response to GitHub events. Also submits code coverage to the Coveralls API.

Currently unsupported:
* Running tests on forked repositories
@@ -26,11 +26,8 @@ Create a shell/batch script for preparing your environment, and one for running
Add these to the settings.json file in config:
```
{
"setup_function": "./prep_env.BAT",
"test_function": "./run_tests.BAT",
"listen_port": 3000,
"timeout": 480000,
"program": "python",
"strict_coverage": false,
"events": {
"push": {
@@ -40,15 +37,23 @@ Add these to the settings.json file in config:
"pull_request": {
"checks": ["continuous-integration", "coverage"],
"actions": ["opened", "synchronize", "reopened"],
"ref_ignore": ["documentation", "gh-pages"]
"ref_ignore": ["documentation", "gh-pages"],
"files_ignore": [".*\\.yml", ".*\\.md", "LICEN[SC]E"]
}
}
"routines": {
"*": ["prep_env.BAT", "run_tests.BAT"]
}
}
```
Some extra optional settings (a combined example follows this list):

- `shell` - optional shell to use when calling scripts (see `child_process.execFile` options).
- `events:event:ref_include` - same as `ref_ignore`, but a pass list instead of a block list.
- `events:event:files_ignore` - list of files whose changes can be ignored. If only ignored files
are changed, checks are skipped.
- `events:pull_request:ignore_drafts` - if true, draft pull request actions are skipped (NB: be
sure to add 'ready_for_review' to the actions list when ignoring drafts).
- `kill_children` - if present and true, `tree-kill` is used to kill the child processes; required
if the shell/batch script forks the test process (e.g. a batch script that calls Python).
- `repos` - an array of submodules or map of modules to their corresponding paths.
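Pulling those optional fields together, a settings.json using them might look like the sketch below. The placement of `kill_children` and `repos` at the top level, and all concrete values (the shell, branch names, submodule name and path), are assumptions for illustration rather than taken from the repository:
```
{
  "listen_port": 3000,
  "timeout": 480000,
  "strict_coverage": false,
  "shell": "cmd.exe",
  "kill_children": true,
  "events": {
    "push": {
      "checks": ["continuous-integration"],
      "ref_include": ["master", "dev"]
    },
    "pull_request": {
      "checks": ["continuous-integration", "coverage"],
      "actions": ["opened", "synchronize", "reopened", "ready_for_review"],
      "ignore_drafts": true,
      "ref_ignore": ["documentation", "gh-pages"],
      "files_ignore": [".*\\.yml", ".*\\.md", "LICEN[SC]E"]
    }
  },
  "routines": {
    "*": ["prep_env.BAT", "run_tests.BAT"]
  },
  "repos": {
    "my-submodule": "C:\\path\\to\\my-submodule"
  }
}
```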
70 changes: 35 additions & 35 deletions config/config.js
@@ -2,78 +2,78 @@ const userSettings = require('./settings.json') || {}; // User settings
const path = require('path');
env = process.env.NODE_ENV || 'production';
const appdata = process.env.APPDATA || process.env.HOME;
const dataPath = process.env.APPDATA? path.join(appdata, 'CI') : path.join(appdata, '.ci');
const dataPath = process.env.APPDATA ? path.join(appdata, 'CI') : path.join(appdata, '.ci');
const fixtureDir = path.resolve(__dirname, '..', 'test', 'fixtures');
const dbFilename = '.db.json';
let settings;

// Defaults for when there's no user file; will almost certainly fail
defaults = {
setup_function: null,
test_function: null,
const defaults = {
max_description_len: 140, // GitHub status API has a description char limit
listen_port: 3000,
timeout: 8*60000,
program: "python",
timeout: 8 * 60000,
strict_coverage: false,
events: {
push: {
checks: null,
ref_ignore: ["documentation", "gh-pages"]
ref_ignore: ['documentation', 'gh-pages']
},
pull_request: {
checks: ["continuous-integration", "coverage"],
actions: ["opened", "synchronize", "reopen"],
ref_ignore: ["documentation", "gh-pages"]
checks: ['continuous-integration', 'coverage'],
actions: ['opened', 'synchronize', 'reopen'],
ref_ignore: ['documentation', 'gh-pages']
}
},
dataPath: dataPath,
dbFile: path.join(dataPath, dbFilename)
}
};

// Settings for the tests
testing = {
const testing = {
listen_port: 3000,
timeout: 60000,
setup_function: 'prep_env.BAT',
test_function: "run_tests.BAT",
events: {
push: {
checks: "continuous-integration",
ref_ignore: "documentation"
checks: 'continuous-integration',
ref_ignore: 'documentation'
},
pull_request: {
checks: ["coverage", "continuous-integration"],
actions: ["opened", "synchronize"],
ref_ignore: ["documentation", "gh-pages"]
checks: ['coverage', 'continuous-integration'],
actions: ['opened', 'synchronize'],
ref_ignore: ['documentation', 'gh-pages']
}
},
routines: {
'*': ['prep_env.BAT', 'run_tests.BAT']
},
dataPath: fixtureDir,
dbFile: path.join(fixtureDir, dbFilename) // cache of test results
}
};

// Pick the settings to return
if (env.startsWith('test')) {
settings = testing;
settings = testing;
} else if (userSettings) {
settings = userSettings;
if (!('dbFile' in settings)) {
settings.dbFile = path.join(dataPath, dbFilename)
}
if (!('dataPath' in settings)) {
settings.dataPath = dataPath;
}
settings = userSettings;
} else {
settings = defaults;
settings = defaults;
}

// Ensure defaults for absent fields
for (let field in defaults) {
if (!(field in settings)) settings[field] = defaults[field];
}

// Check ENV set up correctly
required = ['GITHUB_PRIVATE_KEY', 'GITHUB_APP_IDENTIFIER', 'GITHUB_WEBHOOK_SECRET',
'WEBHOOK_PROXY_URL', 'REPO_PATH', 'REPO_NAME', 'REPO_OWNER', 'TUNNEL_HOST',
'TUNNEL_SUBDOMAIN'];
missing = required.filter(o => { return !process.env[o] });
'WEBHOOK_PROXY_URL', 'REPO_PATH', 'REPO_NAME', 'REPO_OWNER', 'TUNNEL_HOST',
'TUNNEL_SUBDOMAIN'];
missing = required.filter(o => {
return !process.env[o];
});
if (missing.length > 0) {
errMsg = `Env not set correctly; the following variables not found: \n${missing.join(', ')}`
throw ReferenceError(errMsg)
errMsg = `Env not set correctly; the following variables not found: \n${missing.join(', ')}`;
throw ReferenceError(errMsg);
}

module.exports = { settings }
module.exports = { settings };
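For context, a minimal sketch of how another module in the project might consume this config; the consuming file and the field read here are examples, not taken from the repo:
```
// Throws a ReferenceError at require time if any of the required
// environment variables listed above are missing.
const { settings } = require('./config/config');

// Fields absent from settings.json fall back to the defaults defined above.
console.log(`labCI listening on port ${settings.listen_port}`);
```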
6 changes: 3 additions & 3 deletions config/settings.json
@@ -1,9 +1,6 @@
{
"setup_function": "prep_env.BAT",
"test_function": "run_tests.BAT",
"listen_port": 3000,
"timeout": 480000,
"program": "python",
"strict_coverage": false,
"events": {
"push": {
@@ -15,5 +12,8 @@
"actions": ["opened", "synchronize", "reopened"],
"ref_ignore": ["documentation", "gh-pages"]
}
},
"routines": {
"*": ["prep_env.BAT", "run_tests.BAT"]
}
}
140 changes: 68 additions & 72 deletions coverage.js
@@ -21,10 +21,9 @@
const fs = require('fs'),
xml2js = require('xml2js'),
crypto = require('crypto'),
assert = require('assert').strict,
parser = new xml2js.Parser(),
path = require('path');
var timestamp, cb;
var timestamp;

var token = process.env.COVERALLS_TOKEN;

@@ -33,14 +32,14 @@ var token = process.env.COVERALLS_TOKEN;
* Loads file containing source code, returns a hash and line count
* @param {String} path - Path to the source code file.
* @returns {Object} key `Hash` contains MD5 digest string of file; `count` contains number of lines in source file
* @todo Make asynchronous
*/
function md5(path) {
var hash = crypto.createHash('md5'); // Creating hash object
var buf = fs.readFileSync(path, 'utf-8'); // Read in file
var count = buf.split(/\r\n|\r|\n/).length; // Count the number of lines
hash.update(buf, 'utf-8'); // Update hash
return {hash: hash.digest('hex'), count: count};
const hash = crypto.createHash('md5'); // Creating hash object
const buf = fs.readFileSync(path, 'utf-8'); // Read in file
const count = buf.split(/\r\n|\r|\n/).length; // Count the number of lines
hash.update(buf, 'utf-8'); // Update hash

return {hash: hash.digest('hex'), count: count};
}


@@ -50,42 +49,41 @@ function md5(path) {
* @param {Array} classList - An array of class objects from the loaded XML file.
* @param {String} srcPath - The root path of the code repository.
* @param {String} sha - The commit SHA for this coverage test.
* @param {function} callback - The callback function to run when complete. Takes object containing array of source
* code files and their code coverage
* @returns {Object}
* @todo Generalize path default
* @fixme Doesn't work with python's coverage
*/
function formatCoverage(classList, srcPath, sha) {
var job = {};
var sourceFiles = [];
var digest;
srcPath = typeof srcPath != "undefined" ? srcPath : process.env.HOMEPATH; // default to home dir
// For each class, create file object containing array of lines covered and add to sourceFile array
classList.forEach( async c => {
let file = {}; // Initialize file object
let fullPath = c.$.filename.startsWith(srcPath)? c.$.filename : path.join(srcPath, c.$.filename);
digest = md5(fullPath); // Create digest and line count for file
let lines = new Array(digest.count).fill(null); // Initialize line array the size of source code file
c.lines[0].line.forEach(ln => {
let n = Number(ln.$.number);
if (n <= digest.count) {lines[n] = Number(ln.$.hits) }
});
// create source file object
file.name = c.$.filename;
file.source_digest = digest.hash;
file.coverage = lines; // file.coverage[0] == line 1
sourceFiles.push(file);
});
async function formatCoverage(classList, srcPath, sha) {
var job = {};
var sourceFiles = [];
var digest;
srcPath = typeof srcPath != 'undefined' ? srcPath : process.env.REPO_PATH; // default to home dir
// For each class, create file object containing array of lines covered and add to sourceFile array
await Promise.all(classList.map(async c => {
let file = {}; // Initialize file object
let fullPath = c.$.filename.startsWith(srcPath) ? c.$.filename : path.join(srcPath, c.$.filename);
digest = md5(fullPath); // Create digest and line count for file
let lines = new Array(digest.count).fill(null); // Initialize line array the size of source code file
c.lines[0].line.forEach(ln => {
let n = Number(ln.$.number);
if (n <= digest.count) {
lines[n] = Number(ln.$.hits);
}
});
// create source file object
file.name = c.$.filename;
file.source_digest = digest.hash;
file.coverage = lines; // file.coverage[0] == line 1
sourceFiles.push(file);
}));

job.repo_token = token; // env secret token?
job.service_name = `coverage/${process.env.USERDOMAIN}`;
// The associated pull request ID of the build. Used for updating the status and/or commenting.
job.service_pull_request = '';
job.source_files = sourceFiles;
job.commit_sha = sha;
job.run_at = timestamp; // "2013-02-18 00:52:48 -0800"
cb(job);
job.repo_token = token; // env secret token
job.service_name = `coverage/${process.env.USERDOMAIN}`;
// The associated pull request ID of the build. Used for updating the status and/or commenting.
job.service_pull_request = '';
job.source_files = sourceFiles;
job.commit_sha = sha;
job.run_at = timestamp; // "2013-02-18 00:52:48 -0800"
return job;
}

/**
@@ -95,44 +93,42 @@ function formatCoverage(classList, srcPath, sha) {
* @param {String} sha - The commit SHA for this coverage test
* @param {String} repo - The repo to which the commit belongs
* @param {Array} submodules - A list of submodules for separating coverage into
* @param {function} callback - The callback function to run when complete
* @see {@link https://github.com/cobertura/cobertura/wiki|Cobertura Wiki}
*/
function coverage(path, repo, sha, submodules, callback) {
cb = callback; // @fixme Making callback global feels hacky
fs.readFile(path, function(err, data) { // Read in XML file
if (err) {throw err} // @fixme deal with file not found errors
parser.parseString(data, function (err, result) { // Parse XML
// Extract root code path
const rootPath = (result.coverage.sources[0].source[0] || process.env.REPO_PATH).replace(/[\/|\\]+$/, '')
assert(rootPath.endsWith(process.env.REPO_NAME), 'Incorrect source code repository')
timestamp = new Date(result.coverage.$.timestamp*1000); // Convert UNIX timestamp to Date object
let classes = []; // Initialize classes array
function coverage(path, repo, sha, submodules) {
return fs.promises.readFile(path) // Read in XML file
.then(parser.parseStringPromise) // Parse XML
.then(result => {
// Extract root code path
const rootPath = (result.coverage.sources[0].source[0] || process.env.REPO_PATH)
.replace(/[\/|\\]+$/, '');
timestamp = new Date(result.coverage.$.timestamp * 1000); // Convert UNIX timestamp to Date object
let classes = []; // Initialize classes array

const packages = result.coverage.packages[0].package;
packages.forEach(pkg => { classes.push(pkg.classes[0].class) }); // Get all classes
classes = classes.reduce((acc, val) => acc.concat(val), []); // Flatten
const packages = result.coverage.packages[0].package;
packages.forEach(pkg => { classes.push(pkg.classes[0].class); }); // Get all classes
classes = classes.reduce((acc, val) => acc.concat(val), []); // Flatten

// The submodules
const byModule = {'main' : []};
submodules.forEach((x) => { byModule[x] = []; }); // initialize submodules
// The submodules
const byModule = {'main': []};
submodules.forEach((x) => { byModule[x] = []; }); // initialize submodules

// Sort into piles
byModule['main'] = classes.filter(function (e) {
if (e.$.filename.search(/(tests\\|_.*test|docs\\)/i) !== -1) {return false;} // Filter out tests and docs
if (!Array.isArray(e.lines[0].line)) {return false;} // Filter out files with no functional lines
for (let submodule of submodules) {
if (e.$.filename.startsWith(submodule)) {
byModule[submodule].push(e); return false;
}
}
return true;
// Sort into piles
byModule['main'] = classes.filter(function (e) {
if (e.$.filename.search(/(tests\\|_.*test|docs\\)/i) !== -1) return false; // Filter out tests and docs
if (!Array.isArray(e.lines[0].line)) return false; // Filter out files with no functional lines
for (let submodule of submodules) {
if (e.$.filename.startsWith(submodule)) {
byModule[submodule].push(e);
return false;
}
}
return true;
});
// Select module
let modules = byModule[repo] || byModule['main'];
return formatCoverage(modules, rootPath, sha);
});
// Select module
let modules = byModule[repo] || byModule['main'];
formatCoverage(modules, rootPath, callback);
});
});
}


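With this refactor `coverage` returns a promise that resolves to the Coveralls job object assembled by `formatCoverage`, instead of invoking a callback. A minimal usage sketch, assuming the module exports `coverage` and using placeholder values for the XML path, SHA and submodule list:
```
const coverage = require('./coverage');  // assumes coverage() is the module's export

coverage('CoverageResults.xml', process.env.REPO_NAME, 'df64c8c', [])
  .then(job => {
    // job carries repo_token, source_files, commit_sha, run_at, etc.
    console.log(`coverage computed for ${job.source_files.length} source files`);
  })
  .catch(err => console.error('failed to process coverage XML:', err));
```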