function run (opts, cb) {
  const cbPassedIn = (typeof cb === 'function')
  cb = cb || noop

  const tracker = new EE()
  const latencies = new Histogram(1, 10000, 5)
  const requests = new Histogram(1, 1000000, 3)
  const throughput = new Histogram(1, 100000000000, 1)
  const statusCodes = [
    0, // 1xx
    0, // 2xx
    0, // 3xx
    0, // 4xx
    0  // 5xx
  ]

  opts = xtend(defaultOptions, opts)

  // do error checking, if error, return
  if (checkOptsForErrors()) return

  // set tracker.opts here, so throwing over invalid opts and setting defaults etc.
  // is done
  tracker.opts = opts

  if (opts.url.indexOf('http') !== 0) opts.url = 'http://' + opts.url

  const url = URL.parse(opts.url)

  let counter = 0
  let bytes = 0
  let errors = 0
  let timeouts = 0
  let totalBytes = 0
  let totalRequests = 0
  let totalCompletedRequests = 0
  let amount = opts.amount
  let stop = false
  let restart = true
  let numRunning = opts.connections
  let startTime = Date.now()

  // copy over fields so that the client
  // performs the right HTTP requests
  url.pipelining = opts.pipelining
  url.method = opts.method
  url.body = opts.body
  url.headers = opts.headers
  url.setupClient = opts.setupClient
  url.timeout = opts.timeout
  url.requests = opts.requests
  url.reconnectRate = opts.reconnectRate
  url.responseMax = amount || opts.maxConnectionRequests || opts.maxOverallRequests
  url.rate = opts.connectionRate || opts.overallRate
  url.idReplacement = opts.idReplacement

  let clients = []
  initialiseClients(clients)

  if (!amount) {
    var stopTimer = setTimeout(() => {
      stop = true
    }, opts.duration * 1000)
  }

  tracker.stop = () => {
    stop = true
    restart = false
  }

  const interval = reInterval(tickInterval, 1000)

  // put the start emit in a setImmediate so trackers can be added, etc.
  setImmediate(() => { tracker.emit('start') })

  function tickInterval () {
    totalBytes += bytes
    totalCompletedRequests += counter
    requests.record(counter)
    throughput.record(bytes)
    counter = 0
    bytes = 0
    tracker.emit('tick')

    if (stop) {
      if (stopTimer) clearTimeout(stopTimer)
      interval.clear()
      clients.forEach((client) => client.destroy())
      let result = {
        title: opts.title,
        url: opts.url,
        requests: histAsObj(requests, totalCompletedRequests),
        latency: addPercentiles(latencies, histAsObj(latencies)),
        throughput: histAsObj(throughput, totalBytes),
        errors: errors,
        timeouts: timeouts,
        duration: Math.round((Date.now() - startTime) / 1000),
        start: new Date(startTime),
        finish: new Date(),
        connections: opts.connections,
        pipelining: opts.pipelining,
        'non2xx': statusCodes[0] + statusCodes[2] + statusCodes[3] + statusCodes[4]
      }
      result.requests.sent = totalRequests
      statusCodes.forEach((code, index) => { result[(index + 1) + 'xx'] = code })
      tracker.emit('done', result)
      if (!opts.forever) cb(null, result)

      // the restart function
      setImmediate(() => {
        if (opts.forever && restart) {
          stop = false
          stopTimer = setTimeout(() => {
            stop = true
          }, opts.duration * 1000)
          errors = 0
          timeouts = 0
          totalBytes = 0
          totalRequests = 0
          totalCompletedRequests = 0
          statusCodes.fill(0)
          requests.reset()
          latencies.reset()
          throughput.reset()
          startTime = Date.now()

          // reinitialise clients
          clients = []
          initialiseClients(clients)

          interval.reschedule(1000)
          tracker.emit('start')
        }
      })
    }
  }

  function initialiseClients (clients) {
    for (let i = 0; i < opts.connections; i++) {
      if (!amount && !opts.maxConnectionRequests && opts.maxOverallRequests) {
        url.responseMax = distributeNums(opts.maxOverallRequests, i)
      }
      if (amount) {
        url.responseMax = distributeNums(amount, i)
      }
      if (!opts.connectionRate & ...
function Client (opts) {
  if (!(this instanceof Client)) {
    return new Client(opts)
  }

  opts.setupClient = opts.setupClient || noop
  opts.pipelining = opts.pipelining || 1
  opts.port = opts.port || 80

  this.opts = opts
  this.timeout = (opts.timeout || 10) * 1000
  this.secure = opts.protocol === 'https:'
  if (this.secure && this.opts.port === 80) this.opts.port = 443
  this.parser = new HTTPParser(HTTPParser.RESPONSE)
  this.requestIterator = new RequestIterator(opts.requests, opts)
  this.reqsMade = 0

  // used for request limiting
  this.responseMax = opts.responseMax

  // used for rate limiting
  this.reqsMadeThisSecond = 0
  this.rate = opts.rate

  // used for forcing reconnects
  this.reconnectRate = opts.reconnectRate

  this.resData = new Array(opts.pipelining)
  for (var i = 0; i < this.resData.length; i++) {
    this.resData[i] = {
      bytes: 0,
      headers: {},
      startTime: [0, 0]
    }
  }

  // cer = current expected response
  this.cer = 0
  this.destroyed = false

  opts.setupClient(this)

  const handleTimeout = () => {
    // all pipelined requests have timed out here
    this.resData.forEach(() => this.emit('timeout'))
    this.cer = 0
    this._destroyConnection()

    // timeout has already occurred, need to set a new timeoutTicker
    this.timeoutTicker = retimer(handleTimeout, this.timeout)

    this._connect()
  }

  if (this.rate) {
    this.rateInterval = setInterval(() => {
      this.reqsMadeThisSecond = 0
      if (this.paused) this._doRequest(this.cer)
      this.paused = false
    }, 1000)
  }

  this.timeoutTicker = retimer(handleTimeout, this.timeout)

  this.parser[HTTPParser.kOnHeaders] = () => {}

  this.parser[HTTPParser.kOnHeadersComplete] = (opts) => {
    this.emit('headers', opts)
    this.resData[this.cer].headers = opts
  }

  this.parser[HTTPParser.kOnBody] = (body) => {
    this.emit('body', body)
  }

  this.parser[HTTPParser.kOnMessageComplete] = () => {
    let end = process.hrtime(this.resData[this.cer].startTime)
    let responseTime = end[0] * 1e3 + end[1] / 1e6
    this.emit('response', this.resData[this.cer].headers.statusCode, this.resData[this.cer].bytes, responseTime)
    this.resData[this.cer].bytes = 0
    if (!this.destroyed && this.reconnectRate && this.reqsMade % this.reconnectRate === 0) {
      return this._resetConnection()
    }
    this.cer = this.cer === opts.pipelining - 1 ? 0 : this.cer++
    this._doRequest(this.cer)
  }

  this._connect()
}
function parseArguments (argvs) {
  const argv = minimist(argvs, {
    boolean: ['json', 'n', 'help', 'renderLatencyTable', 'renderProgressBar', 'forever', 'idReplacement'],
    alias: {
      connections: 'c',
      pipelining: 'p',
      timeout: 't',
      duration: 'd',
      amount: 'a',
      json: 'j',
      renderLatencyTable: ['l', 'latency'],
      method: 'm',
      headers: ['H', 'header'],
      body: 'b',
      bailout: 'B',
      input: 'i',
      maxConnectionRequests: 'M',
      maxOverallRequests: 'O',
      connectionRate: 'r',
      overallRate: 'R',
      reconnectRate: 'D',
      renderProgressBar: 'progress',
      title: 'T',
      version: 'v',
      forever: 'f',
      idReplacement: 'I',
      help: 'h'
    },
    default: {
      connections: 10,
      timeout: 10,
      pipelining: 1,
      duration: 10,
      reconnectRate: 0,
      renderLatencyTable: false,
      renderProgressBar: true,
      json: false,
      forever: false,
      method: 'GET',
      idReplacement: false
    }
  })

  argv.url = argv._[0]

  // support -n to disable the progress bar and results table
  if (argv.n) {
    argv.renderProgressBar = false
    argv.renderResultsTable = false
  }

  if (argv.version) {
    console.log('autocannon', 'v' + require('./package').version)
    console.log('node', process.version)
    return
  }

  if (!argv.url || argv.help) {
    console.error(help)
    return
  }

  if (argv.input) {
    argv.body = fs.readFileSync(argv.input)
  }

  if (argv.headers) {
    if (!Array.isArray(argv.headers)) {
      argv.headers = [argv.headers]
    }

    argv.headers = argv.headers.reduce((obj, header) => {
      const index = header.indexOf('=')
      obj[header.slice(0, index)] = header.slice(index + 1)
      return obj
    }, {})
  }

  return argv
}
function RequestIterator (requests, defaults) {
  if (!(this instanceof RequestIterator)) {
    return new RequestIterator(requests, defaults)
  }

  this.reqDefaults = defaults
  this.requestBuilder = requestBuilder(defaults)
  this.setRequests(requests)
}
function start (argv) {
  if (!argv) {
    // we are printing the help
    return
  }

  const tracker = run(argv)

  tracker.on('done', (result) => {
    if (argv.json) {
      console.log(JSON.stringify(result))
    }
  })

  tracker.on('error', (err) => {
    if (err) {
      throw err
    }
  })

  // if not rendering json, or if std isn't a tty, track progress
  if (!argv.json || !process.stdout.isTTY) track(tracker, argv)

  process.once('SIGINT', () => {
    tracker.stop()
  })
}
function track (instance, opts) {
  if (!instance) {
    throw new Error('instance required for tracking')
  }

  opts = xtend(defaults, opts)

  const chalk = new Chalk.constructor({ enabled: testColorSupport({ stream: opts.outputStream }) })
  // this default needs to be set after chalk is setup, because chalk is now local to this func
  opts.progressBarString = opts.progressBarString || `${chalk.green('running')} [:bar] :percent`

  const iOpts = instance.opts
  let durationProgressBar
  let amountProgressBar

  instance.on('start', () => {
    if (opts.renderProgressBar) {
      let msg = `${iOpts.connections} connections`
      if (iOpts.pipelining > 1) {
        msg += ` with ${iOpts.pipelining} pipelining factor`
      }

      if (!iOpts.amount) {
        logToStream(`Running ${iOpts.duration}s test @ ${iOpts.url}\n${msg}\n`)
        durationProgressBar = trackDuration(instance, opts, iOpts)
      } else {
        logToStream(`Running ${iOpts.amount} requests test @ ${iOpts.url}\n${msg}\n`)
        amountProgressBar = trackAmount(instance, opts, iOpts)
      }
    }
  })

  // add listeners for progress bar to instance here so they aren't
  // added on restarting, causing listener leaks
  // note: Attempted to curry the functions below, but that breaks the functionality
  // as they use the scope/closure of the progress bar variables to allow them to be reset
  if (opts.renderProgressBar && opts.outputStream.isTTY) {
    if (!iOpts.amount) {
      // duration progress bar
      instance.on('tick', () => { durationProgressBar.tick() })
      instance.on('done', () => { durationProgressBar.tick(iOpts.duration - 1) })
      process.once('SIGINT', () => { durationProgressBar.tick(iOpts.duration - 1) })
    } else {
      // amount progress bar
      instance.on('response', () => { amountProgressBar.tick() })
      instance.on('reqError', () => { amountProgressBar.tick() })
      instance.on('done', () => { amountProgressBar.tick(iOpts.amount - 1) })
      process.once('SIGINT', () => { amountProgressBar.tick(iOpts.amount - 1) })
    }
  }

  instance.on('done', (result) => {
    // the code below this `if` just renders the results table...
    // if the user doesn't want to render the table, we can just return early
    if (!opts.renderResultsTable) return

    const out = table([
      asColor(chalk.cyan, ['Stat', 'Avg', 'Stdev', 'Max']),
      asRow(chalk.bold('Latency (ms)'), result.latency),
      asRow(chalk.bold('Req/Sec'), result.requests),
      asRow(chalk.bold('Bytes/Sec'), asBytes(result.throughput))
    ], {
      border: getBorderCharacters('void'),
      columnDefault: {
        paddingLeft: 0,
        paddingRight: 1
      },
      drawHorizontalLine: () => false
    })

    logToStream(out)

    if (opts.renderLatencyTable) {
      const latency = table([
        asColor(chalk.cyan, ['Percentile', 'Latency (ms)'])
      ].concat(percentiles.map((perc) => {
        const key = ('p' + perc).replace('.', '')
        return [
          chalk.bold('' + perc),
          result.latency[key]
        ]
      })), {
        border: getBorderCharacters('void'),
        columnDefault: {
          paddingLeft: 0,
          paddingRight: 6
        },
        drawHorizontalLine: () => false
      })

      logToStream(latency)
    }

    if (result.non2xx) {
      logToStream(`${result['2xx']} 2xx responses, ${result.non2xx} non 2xx responses`)
    }

    logToStream(`${format(result.requests.total)} requests in ${result.duration}s, ${prettyBytes(result.throughput.total)} read`)
    if (result.errors) {
      logToStream(`${format(result.errors)} errors (${format(result.timeouts)} timeouts)`)
    }
  })

  function logToStream (msg) {
    opts.outputStream.write(msg + '\n')
  }
}
...
* `forever`: A `Boolean` which allows you to set up an instance of autocannon that restarts indefinitely after emitting results
with the `done` event. Useful for efficiently restarting your instance. To stop running forever, you must cause a `SIGINT` or call
the `.stop()` function on your instance. _OPTIONAL_ default: `false`
* `cb`: The callback which is called on completion of a benchmark. Takes the following params. _OPTIONAL_.
  * `err`: If there was an error encountered with the run.
  * `results`: The results of the run.

**Returns** an instance/event emitter for tracking progress, etc.
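A minimal sketch of wiring the options and callback above together, assuming the module's main export is the `run(opts, cb)` function shown earlier and that a service is listening on the (hypothetical) URL:

```js
'use strict'

const autocannon = require('autocannon')

// start a benchmark programmatically; the target URL and the
// connections/duration values here are purely illustrative
const instance = autocannon({
  url: 'http://localhost:3000',
  connections: 10,
  duration: 10
}, (err, results) => {
  // `cb` fires once the benchmark completes (it is not called when `forever` is set)
  if (err) throw err
  console.log(`${results.requests.total} requests, ${results.non2xx} non-2xx`)
})

// the returned tracker is an EventEmitter; end a run (or a forever loop) early with .stop()
process.once('SIGINT', () => instance.stop())
```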
### autocannon.track(instance[, opts])
Track the progress of your autocannon instance programmatically.
* `instance`: The instance of autocannon. _REQUIRED_.
* `opts`: Configuration options for tracking. This can have the following attributes. _OPTIONAL_.
  * `outputStream`: The stream to output to. default: `process.stderr`.
  * `renderProgressBar`: A truthy value to enable the rendering of the progress bar. default: `true`.
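A rough sketch of the tracking API described above, under the same assumptions as the previous example; `renderLatencyTable` is taken from the options visible in the `track()` source below:

```js
'use strict'

const autocannon = require('autocannon')

const instance = autocannon({ url: 'http://localhost:3000', duration: 5 }) // hypothetical target

// render progress and the latency table to stdout instead of the default stderr
autocannon.track(instance, {
  outputStream: process.stdout,
  renderProgressBar: true,
  renderLatencyTable: true
})
```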
...
Client.prototype._connect = function () {
  if (this.secure) {
    this.conn = tls.connect(this.opts.port, this.opts.hostname, { rejectUnauthorized: false })
  } else {
    this.conn = net.connect(this.opts.port, this.opts.hostname)
  }

  this.conn.on('error', (error) => {
    this.emit('connError', error)
    if (!this.destroyed) this._connect()
  })

  this.conn.on('data', (chunk) => {
    this.resData[this.cer].bytes += chunk.length
    this.parser.execute(chunk)
  })

  this.conn.on('end', () => {
    if (!this.destroyed) this._connect()
  })

  for (let i = 0; i < this.opts.pipelining; i++) {
    this._doRequest(i)
  }
}
...
Client.prototype._destroyConnection = function () {
  this.conn.removeAllListeners('error')
  this.conn.removeAllListeners('end')
  this.conn.on('error', () => {})
  this.conn.destroy()
}
...
// rpi = request pipelining index
Client.prototype._doRequest = function (rpi) {
  if (!this.rate || (this.rate && this.reqsMadeThisSecond++ < this.rate)) {
    if (!this.destroyed && this.responseMax && this.reqsMade >= this.responseMax) {
      return this.destroy()
    }
    this.emit('request')
    this.resData[rpi].startTime = process.hrtime()
    this.conn.write(this.requestIterator.move())
    this.timeoutTicker.reschedule(this.timeout)
    this.reqsMade++
  } else {
    this.paused = true
  }
}
...
Client.prototype._okayToUpdateCheck = function () {
  if (this.opts.pipelining > 1) {
    throw new Error('cannot update requests when the pipelining factor is greater than 1')
  }
}
...
Client.prototype.getRequestBuffer = function (newHeaders) {
return this.requestIterator.currentRequest.requestBuffer
}
Client.prototype.setHeaders = function (newHeaders) {
this._okayToUpdateCheck()
this.requestIterator.setHeaders(newHeaders)
}
Client.prototype.setBody = function (newBody) {
this._okayToUpdateCheck()
this.requestIterator.setBody(newBody)
}
...
Client.prototype._resetConnection = function () {
  this._destroyConnection()
  this._connect()
}
...
Client.prototype.destroy = function () {
  if (!this.destroyed) {
    this.destroyed = true
    this.timeoutTicker.clear()
    if (this.rate) clearInterval(this.rateInterval)
    this.emit('done')
    this._destroyConnection()
  }
}
...
* `reqError`: Emitted in the case of a request error e.g. a timeout.
* `error`: Emitted if there is an error during the setup phase of autocannon.

### `Client` API

This object is passed as the first parameter of both the `setupClient` function and the `response` event from an autocannon
instance. You can use this to modify the requests you are sending while benchmarking. This is also an `EventEmitter`, with the
events and their params listed below. A short usage sketch follows the events list below.
* `client.setHeaders(headers)`: Used to modify the headers of the request this client iterator is currently on. `headers` should
be an `Object`, or `undefined` if you want to remove your headers.
* `client.setBody(body)`: Used to modify the body of the request this client iterator is currently on. `body` should be a
`String` or `Buffer`, or `undefined` if you want to remove the body.
* `client.setHeadersAndBody(headers, body)`: Used to modify both the headers and body this client iterator is currently on.
`headers` and `body` should take the same form as above.
* `client.setRequest(request)`: Used to modify the entire request this client iterator is currently on. It can have `headers`,
`body`, `method`, or `path` as attributes. Defaults to the values passed into the autocannon instance when it was created.
`Note: call this when modifying multiple request values for faster encoding`
* `client.setRequests(newRequests)`: Used to overwrite the entire requests array that was passed into the instance on initiation.
`Note: call this when modifying multiple requests for faster encoding`

### `Client` events

The events a `Client` can emit are listed here:
* `headers`: Emitted when a request sent from this client has received the headers of its reply. This receives an `Object` as the
parameter.
* `body`: Emitted when a request sent from this client has received the body of a reply. This receives a `Buffer` as the parameter.
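A hedged sketch of the `Client` API above, used through the `setupClient` option; the URL, header, and body values are illustrative only, and the `response` argument order follows the httpClient source shown elsewhere in this document:

```js
'use strict'

const autocannon = require('autocannon')

autocannon({
  url: 'http://localhost:3000', // hypothetical target
  method: 'POST',
  connections: 2,
  duration: 5,
  setupClient: (client) => {
    // runs once per connection, before any request is written
    client.setHeadersAndBody(
      { 'content-type': 'application/json' },
      JSON.stringify({ ping: true })
    )

    // per the httpClient source, `response` carries (statusCode, bytes, responseTime)
    client.on('response', (statusCode, bytes, responseTime) => {
      if (statusCode >= 400) client.setBody(JSON.stringify({ retry: true }))
    })
  }
}, console.log)
```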
...
Client.prototype.setHeadersAndBody = function (newHeaders, newBody) {
  this._okayToUpdateCheck()
  this.requestIterator.setHeadersAndBody(newHeaders, newBody)
}
...
Client.prototype.setRequest = function (newRequest) {
  this._okayToUpdateCheck()
  this.requestIterator.setRequest(newRequest)
}
...
Client.prototype.setRequests = function (newRequests) {
  this._okayToUpdateCheck()
  this.requestIterator.setRequests(newRequests)
}
...
RequestIterator.prototype.move = function () {
  // get the current buffer and proceed to next request
  let ret = this.currentRequest.requestBuffer
  this.nextRequest()
  return this.reqDefaults.idReplacement
    ? new Buffer(ret.toString().replace(/\[<id>\]/g, hyperid()))
    : ret
}
...
RequestIterator.prototype.nextRequest = function () {
  ++this.currentRequestIndex
  this.currentRequestIndex = this.currentRequestIndex < this.requests.length ? this.currentRequestIndex : 0
  this.currentRequest = this.requests[this.currentRequestIndex]
  return this.currentRequest
}
...
RequestIterator.prototype.nextRequestBuffer = function () {
// get next request
this.nextRequest()
return this.currentRequest.requestBuffer
}
RequestIterator.prototype.rebuildRequest = function () {
  this.currentRequest.requestBuffer = this.requestBuilder(this.currentRequest)
  this.requests[this.currentRequestIndex] = this.currentRequest
}
...
RequestIterator.prototype.setHeaders = function (newHeaders) {
this.currentRequest.headers = newHeaders || {}
this.rebuildRequest()
}
RequestIterator.prototype.setBody = function (newBody) {
this.currentRequest.body = newBody || new Buffer(0)
this.rebuildRequest()
}
...
RequestIterator.prototype.setRequests = function (newRequests) {
this.requests = newRequests || [{}]
this.currentRequestIndex = 0
this.currentRequest = this.requests[0]
this.rebuildRequests()
}
RequestIterator.prototype.rebuildRequests = function () {
this.requests.forEach((request) => {
request.requestBuffer = this.requestBuilder(request)
})
}
...
RequestIterator.prototype.setHeadersAndBody = function (newHeaders, newBody) {
  this.currentRequest.headers = newHeaders || {}
  this.currentRequest.body = newBody || new Buffer(0)
  this.rebuildRequest()
}
...
RequestIterator.prototype.setRequest = function (newRequest) {
  this.currentRequest = newRequest || {}
  this.rebuildRequest()
}
...