worker.js (forked from heroku-examples/node-workers-example)
let throng = require('throng');
let Queue = require("bull");
// Connect to a local Redis instance in development, and to the Heroku-provided URL in production
let REDIS_URL = process.env.REDIS_URL || "redis://127.0.0.1:6379";
// Spin up multiple processes to handle jobs in parallel, taking advantage of more CPU cores
// See: https://devcenter.heroku.com/articles/node-concurrency for more info
let workers = process.env.WEB_CONCURRENCY || 2;
// The maximum number of jobs each worker should process at once. This will need
// to be tuned for your application. If each job is mostly waiting on network
// responses it can be much higher. If each job is CPU-intensive, it might need
// to be much lower.
let maxJobsPerWorker = 50;
function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
}
function start() {
  // Connect to the named work queue
  let workQueue = new Queue('work', REDIS_URL);
  workQueue.process(maxJobsPerWorker, async (job) => {
    // This is an example job that just slowly reports on progress
    // while doing no work. Replace this with your own job logic.
    let progress = 0;
    // Throw an error 5% of the time
    if (Math.random() < 0.05) {
      throw new Error("This job failed!");
    }
    while (progress < 100) {
      await sleep(50);
      progress += 1;
      job.progress(progress);
    }
    // A job can return values that will be stored in Redis as JSON
    // This return value is unused in this demo application.
    return { value: "This will be stored" };
  });
}
// Initialize the clustered worker process
// See: https://devcenter.heroku.com/articles/node-concurrency for more info
throng({ workers, start });
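
// For reference: jobs are added to this queue by a separate web process
// (server.js in the upstream heroku-examples/node-workers-example repo).
// A minimal sketch of that producer side, assuming the same REDIS_URL and
// queue name; the route and response shape are illustrative, not taken
// from this file:
//
//   let express = require('express');
//   let Queue = require('bull');
//
//   let app = express();
//   let workQueue = new Queue('work', process.env.REDIS_URL || 'redis://127.0.0.1:6379');
//
//   app.post('/job', async (req, res) => {
//     // Bull's queue.add() enqueues a job and resolves with the created job
//     let job = await workQueue.add();
//     // The job's eventual return value could be read with job.finished(),
//     // which resolves once the worker completes the job.
//     res.json({ id: job.id });
//   });
//
//   app.listen(process.env.PORT || 3000);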