TaskGroup
Comparison
Here are some comparisons between TaskGroup and other popular flow solutions.
    Promises
    Async.js

Promises

Promises execute immediately, support result chaining, fail to catch/isolate uncaught asynchronous errors thrown within the promise, and lose/silence errors that were never handled.
TaskGroup execution is controlled, supports concurrency configuration, supports optional result storage, catches/isolates uncaught asynchronous errors in environments with domains enabled, and throws unhandled errors so they are not lost or silenced.
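As a minimal sketch of that difference in error handling (the task name and error messages are illustrative, and how loudly an unhandled rejection is reported varies by environment and Node.js version):

// A rejected promise with no handler attached: depending on the environment,
// the failure may only surface as a warning, or be dropped entirely.
Promise.reject(new Error('this detail is easy to lose'))

// The equivalent failure inside a TaskGroup arrives at the completion listener,
// with the task name available for the trace; if no listener handles it,
// TaskGroup throws it rather than silencing it.
const {TaskGroup} = require('taskgroup')
const tasks = TaskGroup.create().done(function (err) {
  if ( err ) return console.error(err)  // receives Error: reported, not lost
})
tasks.addTask('a failing task', function () {
  throw new Error('reported, not lost')
})
tasks.run()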
For a fuller example, let's read a directory with 10,000 files and get the stats for each:
// Import
const {join} = require('path')
const {readdir, stat} = require('fs')

// Using promises
function readdirWithStatsPromise (path) {
  const result = {}
  return new Promise(function (resolve, reject) {
    readdir(path, function (err, files) {
      if ( err ) return reject(err)
      Promise.all(
        files.map((file) => new Promise(function (resolve, reject) {
          stat(join(path, file), function (err, stat) {
            if ( err ) return reject(err)
            result[file] = stat
            resolve()
          })
        }))
      ).then(() => resolve(result)).catch(reject)  // resolve the outer promise with the collected stats
    })
  })
}
readdirWithStatsPromise(process.cwd()).then(console.log).catch(console.error)

// Using taskgroup
const {TaskGroup} = require('taskgroup')
function readdirWithStatsTaskGroup (path, next) {
  const result = {}
  const tasks = new TaskGroup(`fetch files with stats for ${path}`, {concurrency: 0}).done(function (err) {
    if ( err ) return next(err)
    next(null, result)
  })
  readdir(path, function (err, files) {
    if ( err ) return next(err)
    files.forEach(function (file) {
      tasks.addTask(`fetch stat for ${file}`, function (complete) {
        stat(join(path, file), function (err, stat) {
          if ( err ) return complete(err)
          result[file] = stat
          complete()
        })
      })
    })
    tasks.run()
  })
}
readdirWithStatsTaskGroup(process.cwd(), function (err, result) {
  if ( err ) return console.error(err)
  console.log(result)
})

// Using taskgroup, with some cleaning
const {TaskGroup, Task} = require('taskgroup')
function readdirWithStatsTaskGroup (path, next) {
  const result = {}
  readdir(path, function (err, files) {
    if ( err ) return next(err)
    TaskGroup.create({
      concurrency: 0,
      name: `fetch files with stats for ${path}`,
      next: function (err) {
        if ( err ) return next(err)
        next(null, result)
      },
      tasks: files.map(function (file) {
        return Task.create(`fetch stat for ${file}`, function (complete) {
          stat(join(path, file), function (err, stat) {
            if ( err ) return complete(err)
            result[file] = stat
            complete()
          })
        })
      })
    }).run()
  })
}
readdirWithStatsTaskGroup(process.cwd(), function (err, result) {
  if ( err ) return console.error(err)
  console.log(result)
})
It is worth noting the use of optional names for the Tasks and TaskGroups, which makes debugging a breeze: when errors occur, the names are included in the traces. The testing library Joe, which is built on TaskGroup, uses this ability to name suites (TaskGroups) and tests (Tasks), and to identify which tests and tasks have failed, succeeded, or remain incomplete.
It is also worth noting that reading 10,000 files at once puts significant immediate stress on the machine and may exhaust its resources, causing errors, crashes, or lock-ups on less powerful machines. TaskGroup caters for this easily: change the concurrency from 0 (fully parallel) to a more reasonable value such as 100. Doing such concurrency limiting by hand with Promises is incredibly difficult; even plain serial execution (a concurrency of 1) requires Array.prototype.reduce trickery:
// Using promises serially
function readdirWithStatsPromise (path) {
  const result = {}
  return new Promise(function (resolve, reject) {
    readdir(path, function (err, files) {
      if ( err ) return reject(err)
      files.reduce(
        (chain, file) => chain.then(() => new Promise(function (resolve, reject) {
          stat(join(path, file), function (err, stat) {
            if ( err ) return reject(err)
            result[file] = stat
            resolve()
          })
        })),
        Promise.resolve()
      ).then(() => resolve(result)).catch(reject)
    })
  })
}
readdirWithStatsPromise(process.cwd()).then(console.log).catch(console.error)

// Using promises serially, with some cleaning
function readdirWithStatsPromise (path) {
  const result = {}
  return new Promise(function (resolve, reject) {
    readdir(path, function (err, files) {
      if ( err ) return reject(err)
      resolve(files)
    })
  }).then(function (files) {
    return files.reduce(function (chain, file) {
      return chain.then(() => new Promise(function (resolve, reject) {
        stat(join(path, file), function (err, stat) {
          if ( err ) return reject(err)
          result[file] = stat
          resolve()
        })
      }))
    }, Promise.resolve()).then(() => result)
  })
}
readdirWithStatsPromise(process.cwd()).then(console.log).catch(console.error)

// Using promises serially, with complete cleaning
function readdirPromise (path) {
  return new Promise(function (resolve, reject) {
    readdir(path, function (err, files) {
      if ( err ) return reject(err)
      resolve(files)
    })
  })
}
function statPromise (path) {
  return new Promise(function (resolve, reject) {
    stat(path, function (err, stat) {
      if ( err ) return reject(err)
      resolve(stat)
    })
  })
}
function statDirectoryPromise (path, files) {
  const result = {}
  return files.reduce(function (chain, file) {
    return chain.then(() => statPromise(join(path, file))).then(function (stat) {
      result[file] = stat
    })
  }, Promise.resolve()).then(() => result)
}
function readdirWithStatsPromise (path) {
  return readdirPromise(path).then((files) => statDirectoryPromise(path, files))
}
readdirWithStatsPromise(process.cwd()).then(console.log).catch(console.error)
Those were significant changes over several iterations of cleaning from our initial approach, just to change the concurrency from parallel to serial. And even after all that cleaning, specifying an exact intermediate concurrency such as 100 remains impractical to do by hand without extra trickery. Plus, we still lose all the benefits outlined earlier that TaskGroup provides us, such as easier debugging, uniquely named tasks and groups of tasks, easy and precise concurrency control, and catching of asynchronous errors. In TaskGroup, by contrast, that intermediate concurrency is a single configuration value, as sketched below. If you want to get things done without needing trickery, TaskGroup is the best you'll find.
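For reference, here is the concurrency-limited version with TaskGroup: it is the parallel TaskGroup example from above with only the concurrency value changed to 100 (the function name readdirWithStatsLimited is merely illustrative):

// The directory example again, but with at most 100 stat calls in flight at once.
const {join} = require('path')
const {readdir, stat} = require('fs')
const {TaskGroup} = require('taskgroup')

function readdirWithStatsLimited (path, next) {
  const result = {}
  const tasks = new TaskGroup(`fetch files with stats for ${path}`, {concurrency: 100}).done(function (err) {
    if ( err ) return next(err)
    next(null, result)
  })
  readdir(path, function (err, files) {
    if ( err ) return next(err)
    files.forEach(function (file) {
      tasks.addTask(`fetch stat for ${file}`, function (complete) {
        stat(join(path, file), function (err, stat) {
          if ( err ) return complete(err)
          result[file] = stat
          complete()
        })
      })
    })
    tasks.run()
  })
}

readdirWithStatsLimited(process.cwd(), function (err, result) {
  if ( err ) return console.error(err)
  console.log(result)
})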
For a more detailed discussion about interop between the two, see this discussion. For a result chaining solution based on TaskGroup, see Chainy.js.

Async.js

The biggest advantage and difference of TaskGroup over async.js is that TaskGroup has one uniform API to rule them all. With async.js, I found I was always referring back to the async manual to figure out which call fit my use case, and then trying to wrap my head around the async.js way of doing things (which, more often than not, I couldn't). With TaskGroup I never have that problem, as it is one consistent API for all the different use cases.
Let's take a look at what the most common async.js methods would look like in TaskGroup:
// ====================================
// Series

// Async
async.series([
  function () {},
  function (callback) {
    callback()
  }
], next)

// TaskGroup via API, using config
TaskGroup.create({next, tasks: [
  function () {},  // a task that takes no callback argument is treated as synchronous
  function (callback) {
    callback()
  }
]}).run()

// TaskGroup via API, using chaining
TaskGroup.create().done(next).addTasks(
  function () {},
  function (callback) {
    callback()
  }
).run()

// TaskGroup via API
var tasks = TaskGroup.create().done(next)
tasks.addTask(function () {})
tasks.addTask(function (callback) {
  callback()
})
tasks.run()


// ====================================
// Parallel

// Async
async.parallel([
  function () {},
  function (callback) {
    callback()
  }
], next)

// TaskGroup via API, using config
TaskGroup.create({concurrency: 0, next, tasks: [
  function () {},
  function (callback) {
    callback()
  }
]}).run()

// TaskGroup via API, using chaining
TaskGroup.create({concurrency: 0}).done(next).addTasks(
  function () {},
  function (callback) {
    callback()
  }
).run()

// TaskGroup via API
var tasks = TaskGroup.create({concurrency: 0}).done(next)
tasks.addTask(function () {})
tasks.addTask(function (callback) {
  callback()
})
tasks.run()


// ====================================
// Map

// Async
async.map(['file1', 'file2', 'file3'], fs.stat, next)

// TaskGroup via API, using config
const tasks = ['file1', 'file2', 'file3'].map((file) => (complete) => fs.stat(file, complete))
TaskGroup.create({next, tasks}).run()

// TaskGroup via API, using chaining
TaskGroup.create().done(next).addTasks(
  ['file1', 'file2', 'file3'].map((file) => (complete) => fs.stat(file, complete))
).run()

// TaskGroup via API
var tasks = TaskGroup.create().done(next)
;['file1', 'file2', 'file3'].forEach(function (file) {
  tasks.addTask(function (complete) {
    fs.stat(file, complete)
  })
})
tasks.run()
Another big advantage of TaskGroup over async.js is TaskGroup's ability to add tasks to the group once execution has already started. This is a common need when building an application that must perform its actions serially: create a serial TaskGroup for the application, run it right away, then add the actions to the group as tasks as they arise (a sketch follows below).
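A minimal sketch of that pattern, assuming TaskGroup's default concurrency of 1 (serial); the queue name and placeholder tasks are illustrative, and setTimeout stands in for real asynchronous work:

const {TaskGroup} = require('taskgroup')

// Create the serial queue and start it immediately, before any actions exist.
const appQueue = new TaskGroup('application actions').done(function (err) {
  if ( err ) return console.error(err)
})
appQueue.run()

// Later, from wherever actions originate, feed them into the running group.
appQueue.addTask('load configuration', function (complete) {
  setTimeout(complete, 100)  // stand-in for real asynchronous work
})
appQueue.addTask('connect to the database', function (complete) {
  setTimeout(complete, 100)  // stand-in for real asynchronous work
})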
A final big advantage of TaskGroup over async.js is TaskGroup's ability to nest groups. This allowed us to create the Joe Testing Framework & Runner incredibly easily, and because of this functionality Joe always knows which test (task) belongs to which suite (task group), whereas test runners like mocha have to guess: they add the test to the most recently defined suite, which is not always correct, especially with dynamically created tests.
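A rough sketch of nested groups in that Joe-like spirit. Note this involves an assumption about the exact API: the method for adding a child group is named addGroup in some TaskGroup releases and addTaskGroup in others, so check the API reference for your version.

const {TaskGroup} = require('taskgroup')

// A suite (TaskGroup) containing a nested suite, which contains a test (Task).
const suite = TaskGroup.create({name: 'my suite'}).done(function (err) {
  if ( err ) return console.error(err)
  console.log('all suites and tests completed')
})
// ASSUMPTION: addGroup adds a child TaskGroup and invokes its method with the
// child group as `this`; verify the exact name and signature for your version.
suite.addGroup('a nested suite', function () {
  this.addTask('a test inside the nested suite', function (complete) {
    setTimeout(complete, 10)  // stand-in for an asynchronous assertion
  })
})
suite.run()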