diff --git a/Dockerfile b/Dockerfile index 31377b25..02b0fd59 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM node:7.6-alpine +FROM node:8.9-alpine MAINTAINER Ryan Gaus "rgaus.net" # Create a user to run the app and setup a place to put the app diff --git a/package.json b/package.json index 4d6244c0..b1e5ea5f 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@backstroke/server", - "version": "2.1.0", + "version": "2.2.0", "description": "", "main": "src/server.js", "dependencies": { diff --git a/src/jobs/webhook-dispatcher/README.md b/src/jobs/webhook-dispatcher/README.md deleted file mode 100644 index 067c658d..00000000 --- a/src/jobs/webhook-dispatcher/README.md +++ /dev/null @@ -1,68 +0,0 @@ -# Webhook Dispatcher Job - -This job is used to push automatic updates to the queue. Every 30 seconds, the database is queried -to see if there are any links that were last updated over 10 minutes ago. If so, an `AUTOMATIC` -update is pushed to the queue and the link is updated to make the last updated field the current -date. This has the effect of pushing a queue update every 10 minutes for each link. - -## Disambiguation - -Backstroke runs the auto syncing process on links every 10 minutes. When it's your link's turn, -here's what happens: -1. Within the database, each link stores the latest SHA of the upstream end of the link. -2. Github is queried to get the SHA at the head of the branch of the upstream that is being synced -from; in the above example, `upstream/foo`'s `master` branch would be fetched. -3. If the stored SHA and the fetched SHA differ, then dispatch a new link update into the queue. -4. Update the stored SHA in the database that is associated with the link to be the fetched SHA. - -For example, let's say we have two repositories, `upstream/foo`, and `fork/foo`. As the names -suggest, `fork/foo` is a fork of `upstream/foo`. 
Backstroke is configured to keep these two -repositories in sync on the `master` branches of both repositories: - -``` -In Database: -upstream/foo master = 39ef1a - -On Github: - - +-----------------------------------+ - | | - | upstream/foo master = 78bb0b6 | - | | - +-----------------------------------+ - || - || - \ / - \/ - +-----------------------------------+ - | | - | fork/foo master = 75c2be84 | - | | - +-----------------------------------+ -``` - -In the above example, the automatic linking process would create a pull request on `fork/foo`, since -the database's representation of `upstream/foo` differs from the actual representation of -`upstream/foo`. Afterwards, this would be the state of the system: - -``` -In Database: -upstream/foo master = 78bb0b6 - -On Github: - - +-----------------------------------+ - | | - | upstream/foo master = 78bb0b6 | - | | - +-----------------------------------+ - || - || - \ / - \/ - +-----------------------------------+ - | | - | fork/foo master = 75c2be84 | <- There's an open pull request to bring master up to 78bb0b6! - | | - +-----------------------------------+ -``` diff --git a/src/jobs/webhook-dispatcher/fetch-sha-for-upstream-branch.js b/src/jobs/webhook-dispatcher/fetch-sha-for-upstream-branch.js deleted file mode 100644 index 7346212f..00000000 --- a/src/jobs/webhook-dispatcher/fetch-sha-for-upstream-branch.js +++ /dev/null @@ -1,73 +0,0 @@ -import fetch from 'node-fetch'; - -import Debug from 'debug'; -const debug = Debug('backstroke:webhook:fetch-sha-for-upstream-branch'); - -export default async function fetchSHAForUpstreamBranch({ - id, - owner, - upstreamLastSHA, - upstreamOwner, - upstreamRepo, - upstreamBranch, -}) { - if (upstreamLastSHA) { - // Get the short version of the sha - const upstreamLastSHAShort = upstreamLastSHA ? upstreamLastSHA.slice(0, 8) : null; - - // First check. Search for the short versino of the last sha on the github repo page. 
The hope - // here is to avoid having to use a query with the api token if not required. - const foundLastSHAOnPage = await fetch( - `https://github.com/${upstreamOwner}/${upstreamRepo}/tree/${upstreamBranch}` - ).then(async resp => { - if (resp.ok) { - return resp.text().catch(err => null).then(data => { - // Search for the sha in the return from the github page. If it's found, then we know that - // it's the latest commit. - return data.match(new RegExp(`Latest commit[^]{0,150}${upstreamLastSHAShort}`, 'i')) !== null; - }); - } else { - return null; - } - }); - - if (foundLastSHAOnPage) { - debug('Link %o, Found commit hash on github page, so nothing changed.', id); - return upstreamLastSHA; - } - } - - // Second check. If no definitive answer was found by looking at the github page, then make an api - // call to github to figure it out. - debug('Link %o, Falling back to proper api call...', id); - - // Fetch the latest commit in the branch `upstreamBranch`. - let results = []; - try { - const resp = await fetch(`https://api.github.com/repos/${encodeURIComponent(upstreamOwner)}/${encodeURIComponent(upstreamRepo)}/commits`, { - qs: { - sha: upstreamBranch, - per_page: 1, - }, - headers: { - 'Authorization': `Bearer ${owner.accessToken}`, - }, - }); - - results = await resp.json(); - } catch (err) { - throw new Error(`Repository ${upstreamOwner}/${upstreamRepo} does not exist. ERROR = '${err.toString()}'`); - return null; - } - - debug('Link %o, Response from getting HEAD of %o branch on %o/%o: %o results', id, upstreamBranch, upstreamOwner, upstreamRepo, results.length); - - // The branch has no commits? No commit hash, so return null. - if (results.length === 0) { - return null; - } else { - // Return the HEAD commit hash for the upstream. 
- return results[0].sha; - } -} - diff --git a/src/jobs/webhook-dispatcher/index.js b/src/jobs/webhook-dispatcher/index.js deleted file mode 100644 index 55fea9d7..00000000 --- a/src/jobs/webhook-dispatcher/index.js +++ /dev/null @@ -1,79 +0,0 @@ -import {literal} from 'sequelize'; -import Debug from 'debug'; -const debug = Debug('backstroke:webhook:job'); - -const AUTOMATIC = 'AUTOMATIC'; -const UPDATE_SECONDS = 30; -const WEBHOOK_SYNC_DURATION = process.env.WEBHOOK_SYNC_DURATION || '10 minutes'; - -// Every 30 seconds, try to update a link. -export default function(Link, User, WebhookQueue, upstreamSHAChanged) { - webhookJob.apply(null, arguments); - return setInterval(() => webhookJob.apply(null, arguments), UPDATE_SECONDS * 1000); -} - -export async function webhookJob(Link, User, WebhookQueue, fetchSHAForUpstreamBranch) { - const links = await Link.findAll({ - where: { - name: {ne: ''}, - enabled: true, - lastSyncedAt: { - lt: literal(`now() - interval '${WEBHOOK_SYNC_DURATION}'`), - }, - - upstreamType: {ne: null}, - upstreamOwner: {ne: null}, - upstreamRepo: {ne: null}, - - forkType: {ne: null}, - forkOwner: {ne: null}, - forkRepo: {ne: null}, - }, - include: [{model: User, as: 'owner'}], - }); - - // No links to update? - if (!links || links.length === 0) { - debug('No links to update. Breaking...'); - return null; - } - - const responses = links.map(async link => { - let headSha; - try { - headSha = await fetchSHAForUpstreamBranch(link); - } catch (err) { - debug(`Error fetching upstream sha for %o: %o`, link.id, err.message); - headSha = null; - } - - // Before enqueuing an update, make sure that the commit hash actually changed of the upstream - debug(`Updating link %o, last updated = %o, last known SHA = %o, current SHA = %o`, link.id, link.lastSyncedAt, link.upstreamLastSHA, headSha); - - // Head sha of the upstream wasn't able to found. Maybe the repo was deleted? 
- if (link.upstreamLastSHA && !headSha) { - debug(`Unable to fetch upstream sha for link %o`, link.id); - - // Link hasn't been synced, ever, since no 'last sha' value is present. Sync it. - } else if (!link.upstreamLastSHA) { - await WebhookQueue.push({type: AUTOMATIC, user: link.owner, link}); - debug(`Update enqueued successfully for link %o. REASON = FIRST_SYNC`, link.id); - - // The upstream has new commits since the last polling attempt. Sync it. - } else if (link.upstreamLastSHA !== headSha) { - await WebhookQueue.push({type: AUTOMATIC, user: link.owner, link}); - debug(`Update enqueued successfully for link %o. REASON = UPSTREAM_NEW_COMMITS`, link.id); - - } else { - debug(`Link %o didn't change, update not required.`, link.id); - } - - // Update the link instance to say that the link has been synced (or, at least checked) - await Link.update({lastSyncedAt: new Date, upstreamLastSHA: headSha}, {where: {id: link.id}, limit: 1}); - return true; - }); - - return Promise.all(responses).catch(err => { - console.error('Error in syncing job:', err); - }); -} diff --git a/src/jobs/webhook-dispatcher/test.js b/src/jobs/webhook-dispatcher/test.js deleted file mode 100644 index 6b2e55bf..00000000 --- a/src/jobs/webhook-dispatcher/test.js +++ /dev/null @@ -1,119 +0,0 @@ -import assert from 'assert'; -import {webhookJob} from './index'; -import MockModel from '../../test-helpers/mock-model'; -import sinon from 'sinon'; - -const LONG_TIME_AGO = '2017-08-10T10:54:53.450Z'; - -const User = new MockModel(), - Link = new MockModel([], {owner: User}); - -const MockWebhookQueue = { - queue: [], - reset() { - this.queue = []; - }, - push(item) { - const id = (new Date()).getTime(); - this.queue.push({id, item}); - return Promise.resolve(id); - }, - pop() { - const popped = this.queue.pop(); - return Promise.resolve(popped ? 
{data: popped.item, id: popped.id}: null); - }, -}; - -Link.methods.display = function() { return this; } - -describe('webhook dispatcher job', function() { - let user, link, linkExistingUpstreamSHA; - beforeEach(async () => { - MockWebhookQueue.reset(); - - user = await User.create({username: 'ryan'}); - link = await Link.create({ - name: 'My Link', - enabled: true, - owner: user.id, - lastSyncedAt: LONG_TIME_AGO, - - upstreamType: 'repo', - upstreamOwner: 'foo', - upstreamRepo: 'bar', - upstreamIsFork: false, - upstreamBranches: '["master"]', - upstreamBranch: 'master', - - forkType: 'all-forks', - forkOwner: undefined, - forkRepo: undefined, - forkBranches: undefined, - forkBranch: undefined, - }); - linkExistingUpstreamSHA = await Link.create({ - name: 'My Link', - enabled: true, - owner: user.id, - lastSyncedAt: LONG_TIME_AGO, - - upstreamType: 'repo', - upstreamOwner: 'foo', - upstreamRepo: 'bar', - upstreamIsFork: false, - upstreamBranches: '["master"]', - upstreamBranch: 'master', - upstreamLastSHA: 'SOMERANDOMSHATHATWSONTHELINKALREADY', - - forkType: 'all-forks', - forkOwner: undefined, - forkRepo: undefined, - forkBranches: undefined, - forkBranch: undefined, - }); - }); - - it(`should queue an update if the link is brand new (ie, hasn't been updated before)`, async function() { - Link.findAll = sinon.stub().resolves([{...link, owner: user}]); - const fetchSHAForUpstreamBranch = sinon.stub().resolves(false); - - const result = await webhookJob(Link, User, MockWebhookQueue, fetchSHAForUpstreamBranch); - - // Item was added to the queue. - assert.equal(MockWebhookQueue.queue.length, 1); - assert.equal(MockWebhookQueue.queue[0].item.type, 'AUTOMATIC'); - assert.equal(MockWebhookQueue.queue[0].item.link.id, link.id); - assert.equal(MockWebhookQueue.queue[0].item.user.id, link.ownerId); - - // And the last synced time was updated. 
- assert.notEqual((await Link.findById(link.id)).lastSyncedAt, LONG_TIME_AGO); - }); - it(`should queue an update if the link's upstream has a new commit`, async function() { - Link.findAll = sinon.stub().resolves([{...linkExistingUpstreamSHA, owner: user}]); - const fetchSHAForUpstreamBranch = sinon.stub().resolves(true); // New commit on upstream! - - const result = await webhookJob(Link, User, MockWebhookQueue, fetchSHAForUpstreamBranch); - - // Item was added to the queue. - assert.equal(MockWebhookQueue.queue.length, 1); - assert.equal(MockWebhookQueue.queue[0].item.type, 'AUTOMATIC'); - assert.equal(MockWebhookQueue.queue[0].item.link.id, linkExistingUpstreamSHA.id); - assert.equal(MockWebhookQueue.queue[0].item.user.id, linkExistingUpstreamSHA.ownerId); - - // And the last synced time was updated. - assert.notEqual((await Link.findById(linkExistingUpstreamSHA.id)).lastSyncedAt, LONG_TIME_AGO); - }); - it(`should not fail if there are no links to be updated`, async function() { - Link.findAll = sinon.stub().resolves([]); // No links - const fetchSHAForUpstreamBranch = sinon.stub().resolves(false); - - const result = await webhookJob(Link, User, MockWebhookQueue, fetchSHAForUpstreamBranch); - assert.equal(result, null); - - // Queue is still empty - assert.equal(MockWebhookQueue.queue.length, 0); - - // Link last synced time wasn't changed. 
assert.equal((await Link.findById(link.id)).lastSyncedAt, LONG_TIME_AGO); -  }); -}); diff --git a/src/models.js b/src/models.js index 0b3beb8d..c2d2eacb 100644 --- a/src/models.js +++ b/src/models.js @@ -3,9 +3,6 @@ import debug from 'debug'; import uuid from 'uuid'; import fetch from 'node-fetch'; -import { webhookJob } from './jobs/webhook-dispatcher'; -import fetchSHAForUpstreamBranch from './jobs/webhook-dispatcher/fetch-sha-for-upstream-branch'; - import Redis from 'redis'; const redis = Redis.createClient(process.env.REDIS_URL); import RedisMQ from 'rsmq'; @@ -15,8 +12,9 @@ const redisQueue = new RedisMQ({ }); const ONE_HOUR_IN_SECONDS = 60 * 60; +const LINK_OPERATION_EXPIRY_TIME_IN_SECONDS = 24 * ONE_HOUR_IN_SECONDS; export const WebhookStatusStore = { -  set(webhookId, status, expiresIn=24*ONE_HOUR_IN_SECONDS) { +  set(webhookId, status, expiresIn=LINK_OPERATION_EXPIRY_TIME_IN_SECONDS) { return new Promise((resolve, reject) => { redis.set(`webhook:status:${webhookId}`, JSON.stringify(status), 'EX', expiresIn, (err, id) => { if (err) { @@ -28,18 +26,59 @@ }); }); }, -  get(webhookId) { +  get(webhookId, hideSensitiveKeys=true) { return new Promise((resolve, reject) => { redis.get(`webhook:status:${webhookId}`, (err, data) => { if (err) { reject(err); } else { // Resolves the cached data. -        resolve(JSON.parse(data)); +        const parsed = JSON.parse(data); +        if (hideSensitiveKeys) { +          if (parsed) { +            // Remove access token from response. +            resolve({ +              ...parsed, +              link: { +                ...(parsed.link || {}), +                owner: { +                  ...(parsed.link ? parsed.link.owner : {}), +                  accessToken: undefined, +                }, +              }, +            }); +          } else { +            // No operation id was found; resolve (not bare return) so callers don't hang. +            resolve(null); +          } +        } else { +          resolve(parsed); +        } } }); }); }, +  getOperations(linkId) { +    return new Promise((resolve, reject) => { +      // Get unix epoch timestamp in seconds. +      // FIXME: should use redis time. We're not accounting for any sort of server time drift here.
+ const timestamp = Math.floor(new Date().getTime() / 1000); + + // Return all operations associated with a given link that have happened in the last 24 hours. + redis.zrangebyscore( + `webhook:operations:${linkId}`, + timestamp - LINK_OPERATION_EXPIRY_TIME_IN_SECONDS, + timestamp, + (err, data) => { + if (err) { + reject(err); + } else { + resolve(data); + } + } + ); + }); + }, }; export const WebhookQueue = { @@ -298,13 +337,5 @@ if (require.main === module) { // From https://stackoverflow.com/questions/33673999/passing-context-to-interactive-node-shell-leads-to-typeerror-sandbox-argument Object.assign(repl.start(options).context, context); - } else if (process.argv[2] == 'manual-job') { - webhookJob(Link, User, WebhookQueue, fetchSHAForUpstreamBranch).then(resp => { - console.log('Completed. Response:'); - console.log(resp); - }).catch(err => { - console.error('Error:'); - console.error(error); - }); } } diff --git a/src/routes/links/get-operations/index.js b/src/routes/links/get-operations/index.js new file mode 100644 index 00000000..87b6e06c --- /dev/null +++ b/src/routes/links/get-operations/index.js @@ -0,0 +1,28 @@ +import Debug from 'debug'; +const debug = Debug('backstroke:links:getOperations'); + +// Return all link operations that are associated with a given link. +export default async function getOperations(req, res, Link, WebhookStatusStore) { + const link = await Link.findById(req.params.id); + + if (link && link.ownerId !== req.user.id) { + debug('LINK %o NOT OWNED BY %o', link.id, req.user.id); + throw new Error('No such link.'); + } else if (link) { + // Fetch all operations associated with a link. + const operations = await WebhookStatusStore.getOperations(link.id); + + // Allow passing an optional ?detail=true query param to lookup each operation. + if (req.query.detail && req.query.detail.toLowerCase() === 'true') { + // Lookup each operation in parallel, and return them. 
+      const statuses = await Promise.all(operations.map(op => WebhookStatusStore.get(op))); +      // Add an id back to each response +      return statuses.map((status, index) => Object.assign({id: operations[index]}, status)); +    } + +    // Otherwise, return just the operation ids. +    return operations; +  } else { +    throw new Error('No such link.'); +  } +} diff --git a/src/routes/links/get-operations/test.js b/src/routes/links/get-operations/test.js new file mode 100644 index 00000000..b62319dd --- /dev/null +++ b/src/routes/links/get-operations/test.js @@ -0,0 +1,171 @@ +import get from './'; + +import sinon from 'sinon'; +import assert from 'assert'; + +// Helper for mounting routes in an express app and querying them. +// import db from '../../test-helpers/create-database-model-instances'; +import issueRequest from '../../../test-helpers/issue-request'; +import MockModel from '../../../test-helpers/mock-model'; + +const User = new MockModel(), +      Link = new MockModel([], {owner: User}); + +Link.methods.display = function() { return this; } + +describe('link get operations', () => { +  let user, link, link2; + +  beforeEach(async function() { +    user = await User.create({username: 'ryan'}); +    link = await Link.create({ +      name: 'My Link', +      enabled: true, +      owner: user.id, + +      upstreamType: 'repo', +      upstreamOwner: 'foo', +      upstreamRepo: 'bar', +      upstreamIsFork: false, +      upstreamBranches: '["master"]', +      upstreamBranch: 'master', + +      forkType: 'all-forks', +      forkOwner: undefined, +      forkRepo: undefined, +      forkBranches: undefined, +      forkBranch: undefined, +    }); +  }); + +  it('should fetch all operations for a link in the past 24 hours', () => { +    // Grab the most recent model from the mock (.models is a mock-specific property) +    const linkModel = Link.models[Link.models.length - 1]; + +    // Create a mock status store to use with this test.
+ const MockWebhookStatusStore = { + getOperations: sinon.stub().resolves(['adgrha', 'uyrjnh', 'brsnyi']), + get: sinon.stub().resolves({status: 'ok'}), + }; + + return issueRequest( + get, [Link, MockWebhookStatusStore], + '/operations/:id', user, { + method: 'GET', + url: `/operations/${linkModel.id}`, + json: true, + } + ).then(res => { + const body = res.body; + assert.deepEqual(body, ['adgrha', 'uyrjnh', 'brsnyi']); + + // Assert that `.get` wasn't called. + assert.equal(MockWebhookStatusStore.getOperations.callCount, 1); + assert.equal(MockWebhookStatusStore.get.callCount, 0); + }); + }); + it('should try to fetch all operations for a link, but fail if the link id is bad', () => { + // Create a mock status store to use with this test. + const MockWebhookStatusStore = { + getOperations: sinon.stub().resolves(['adgrha', 'uyrjnh', 'brsnyi']), + get: sinon.stub().resolves({status: 'ok'}), + }; + + return issueRequest( + get, [Link, MockWebhookStatusStore], + '/:id', user, { + method: 'GET', + url: `/13527501385710357139f313`, // Bogus id + json: true, + } + ).then(res => { + const body = res.body; + assert.equal(body.error, 'No such link.'); + + // Assert that both functions weren't called. + assert.equal(MockWebhookStatusStore.getOperations.callCount, 0); + assert.equal(MockWebhookStatusStore.get.callCount, 0); + }); + }); + it('should try to fetch all operations for a link, but fail when the redis call fails', () => { + // Grab the most recent model from the mock (.models is a mock-specific property) + const linkModel = Link.models[Link.models.length - 1]; + + // Create a mock status store to use with this test. 
+ const MockWebhookStatusStore = { + getOperations: sinon.stub().rejects(new Error('Boom!')), + get: sinon.stub().resolves({status: 'ok'}), + }; + + return issueRequest( + get, [Link, MockWebhookStatusStore], + '/operations/:id', user, { + method: 'GET', + url: `/operations/${linkModel.id}`, + json: true, + } + ).then(res => { + const body = res.body; + assert.equal(body.error, 'Boom!'); + + // Assert that `.get` wasn't called. + assert.equal(MockWebhookStatusStore.getOperations.callCount, 1); + assert.equal(MockWebhookStatusStore.get.callCount, 0); + }); + }); + it('should fetch all operations for a link in the past 24 hours, with ?detail=true param', () => { + // Grab the most recent model from the mock (.models is a mock-specific property) + const linkModel = Link.models[Link.models.length - 1]; + + // Create a mock status store to use with this test. + const MockWebhookStatusStore = { + getOperations: sinon.stub().resolves(['adgrha', 'uyrjnh', 'brsnyi']), + get: sinon.stub().resolves({status: 'OK'}), + }; + + return issueRequest( + get, [Link, MockWebhookStatusStore], + '/operations/:id', user, { + method: 'GET', + url: `/operations/${linkModel.id}?detail=true`, + json: true, + } + ).then(res => { + const body = res.body; + assert.deepEqual(body, [ + {id: 'adgrha', status: 'OK'}, + {id: 'uyrjnh', status: 'OK'}, + {id: 'brsnyi', status: 'OK'}, + ]); + + // Assert that `.getOperations` was called once and `.get` was called three times. + assert.equal(MockWebhookStatusStore.getOperations.callCount, 1); + assert.equal(MockWebhookStatusStore.get.callCount, 3); + }); + }); + it('should try to fetch all operations for a link with ?detail=true param, but fail when the redis call fails', () => { + // Grab the most recent model from the mock (.models is a mock-specific property) + const linkModel = Link.models[Link.models.length - 1]; + + // Create a mock status store to use with this test. 
+ const MockWebhookStatusStore = { + getOperations: sinon.stub().resolves(['adgrha', 'uyrjnh', 'brsnyi']), + get: sinon.stub().rejects(new Error('Boom!')), + }; + + return issueRequest( + get, [Link, MockWebhookStatusStore], + '/operations/:id', user, { + method: 'GET', + url: `/operations/${linkModel.id}?detail=TRUE`, + json: true, + } + ).then(res => { + const body = res.body; + assert.equal(body.error, 'Boom!'); + + assert.equal(MockWebhookStatusStore.getOperations.callCount, 1); + assert.equal(MockWebhookStatusStore.get.callCount, 3); + }); + }); +}); diff --git a/src/server.js b/src/server.js index 560c062f..0e55b0b9 100644 --- a/src/server.js +++ b/src/server.js @@ -41,12 +41,11 @@ import checkRepo from './routes/checkRepo'; import manual from './routes/webhook/manual'; import status from './routes/webhook/status'; -import webhookDispatcherJob from './jobs/webhook-dispatcher'; -import fetchSHAForUpstreamBranch from './jobs/webhook-dispatcher/fetch-sha-for-upstream-branch'; import { isCollaboratorOfRepository } from './routes/links/update/helpers'; import linksList from './routes/links/list'; import linksGet from './routes/links/get'; +import linksGetOperations from './routes/links/get-operations'; import linksCreate from './routes/links/create'; import linksDelete from './routes/links/delete'; import linksUpdate from './routes/links/update'; @@ -74,14 +73,6 @@ if (process.env.SENTRY_CONFIG) { Raven.config(process.env.SENTRY_CONFIG).install(); } -// ---------------------------------------------------------------------------- -// Start the webhook job to add to the queue automatically -// ---------------------------------------------------------------------------- -if (require.main === module) { - console.log('Starting webhook jobs...'); - webhookDispatcherJob(Link, User, WebhookQueue, fetchSHAForUpstreamBranch); -} - // ---------------------------------------------------------------------------- // Passport stuff // 
---------------------------------------------------------------------------- @@ -207,9 +198,12 @@ app.get('/v1/whoami', whoami); // GET all links app.get('/v1/links', bodyParser.json(), assertLoggedIn, analyticsForRoute, route(linksList, [Link])); -// GET a given link +// GET a single link app.get('/v1/links/:id', bodyParser.json(), assertLoggedIn, analyticsForRoute, route(linksGet, [Link])); +// GET all operations associated with a single link +app.get('/v1/links/:id/operations', bodyParser.json(), assertLoggedIn, analyticsForRoute, route(linksGetOperations, [Link, WebhookStatusStore])); + // Create a new link app.post('/v1/links', bodyParser.json(), assertLoggedIn, analyticsForRoute, route(linksCreate, [Link]));