
Merge pull request #145 from goto-bus-stop/on-port
--on-port
mcollina committed Aug 20, 2018
2 parents 7702edb + 0027720 commit 57ce004
Showing 13 changed files with 328 additions and 51 deletions.
31 changes: 31 additions & 0 deletions README.md
@@ -53,6 +53,10 @@ Available options:
The number of requests to make before exiting the benchmark. If set, duration is ignored.
-S/--socketPath
A path to a Unix Domain Socket or a Windows Named Pipe. A URL is still required in order to send the correct Host header and path.
--on-port
Start the command listed after -- on the command line. When it starts listening on a port,
start sending requests to that port. A URL is still required in order to send requests to
the correct path. The hostname can be omitted; `localhost` is used by default (see the usage sketch below).
-m/--method METHOD
The HTTP method to use. Default: 'GET'.
-t/--timeout NUM
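
A minimal usage sketch of the `--on-port` flag described above; the path `/api/health` and `server.js` are placeholders, not part of this change:

```sh
# start server.js, wait for it to listen, then fire requests at /api/health on that port
autocannon -c 10 -d 5 --on-port /api/health -- node server.js
```
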
@@ -202,6 +206,33 @@ Because an autocannon instance is an `EventEmitter`, it emits several events. th
* `reqError`: Emitted in the case of a request error, e.g. a timeout.
* `error`: Emitted if there is an error during the setup phase of autocannon.

### results

The results object emitted by `done` and passed to the `autocannon()` callback has these properties:

* `title`: Value of the `title` option passed to `autocannon()`.
* `url`: The URL that was targeted.
* `socketPath`: The UNIX Domain Socket or Windows Named Pipe that was targeted, or `undefined`.
* `requests`: A histogram object containing statistics about the number of requests sent per second.
* `latency`: A histogram object containing statistics about response latency.
* `throughput`: A histogram object containing statistics about the response data throughput per second.
* `duration`: The amount of time the test took, **in seconds**.
* `errors`: The number of connection errors (including timeouts) that occurred.
* `timeouts`: The number of connection timeouts that occurred.
* `start`: A Date object representing when the test started.
* `finish`: A Date object representing when the test ended.
* `connections`: The number of connections used (value of `opts.connections`).
* `pipelining`: The number of pipelined requests used per connection (value of `opts.pipelining`).
* `non2xx`: The number of non-2xx response status codes received.

The histogram objects for `requests`, `latency` and `throughput` are [hdr-histogram-percentiles-obj](https://github.com/thekemkid/hdr-histogram-percentiles-obj) objects and have this shape:

* `min`: The lowest value for this statistic.
* `max`: The highest value for this statistic.
* `average`: The average (mean) value.
* `stddev`: The standard deviation.
* `p*`: The XXth percentile value for this statistic. The percentile properties are: `p2_5`, `p50`, `p75`, `p90`, `p97_5`, `p99`, `p99_9`, `p99_99`, `p99_999`.
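
For example, a minimal sketch of reading these results through the programmatic API (the target URL below is a placeholder, not part of this change):

```js
'use strict'

const autocannon = require('autocannon')

autocannon({
  url: 'http://localhost:3000', // placeholder target
  connections: 10,
  duration: 5
}, (err, result) => {
  if (err) throw err
  // `result` has the shape documented above
  console.log(`avg req/sec: ${result.requests.average}`)
  console.log(`p99 latency (ms): ${result.latency.p99}`)
  console.log(`non-2xx responses: ${result.non2xx}`)
})
```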

### `Client` API

This object is passed as the first parameter of both the `setupClient` function and the `response` event from an autocannon instance. You can use this to modify the requests you are sending while benchmarking. This is also an `EventEmitter`, with the events and their params listed below.
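
A hedged sketch of that flow, assuming the `setupClient` option and the `client.setHeaders` method documented in this README (the URL is a placeholder):

```js
'use strict'

const autocannon = require('autocannon')

const instance = autocannon({
  url: 'http://localhost:3000', // placeholder target
  connections: 2,
  duration: 3,
  // called once per connection with the Client object described here
  setupClient: (client) => {
    client.setHeaders({ 'x-benchmark': 'autocannon' })
  }
}, (err, result) => {
  if (err) throw err
  console.log(`test ran for ${result.duration} seconds`)
})

// the same Client object is the first argument of the instance's `response` event
instance.on('response', (client) => {
  // e.g. rotate headers or bodies between requests here
})
```
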
82 changes: 79 additions & 3 deletions autocannon.js
@@ -4,8 +4,13 @@

const minimist = require('minimist')
const fs = require('fs')
const os = require('os')
const net = require('net')
const path = require('path')
const URL = require('url').URL
const spawn = require('child_process').spawn
const managePath = require('manage-path')
const hasAsyncHooks = require('has-async-hooks')
const help = fs.readFileSync(path.join(__dirname, 'help.txt'), 'utf8')
const run = require('./lib/run')
const track = require('./lib/progressTracker')
@@ -23,7 +28,7 @@ module.exports.parseArguments = parseArguments

function parseArguments (argvs) {
const argv = minimist(argvs, {
boolean: ['json', 'n', 'help', 'renderLatencyTable', 'renderProgressBar', 'forever', 'idReplacement', 'excludeErrorStats'],
boolean: ['json', 'n', 'help', 'renderLatencyTable', 'renderProgressBar', 'forever', 'idReplacement', 'excludeErrorStats', 'onPort'],
alias: {
connections: 'c',
pipelining: 'p',
@@ -32,6 +37,7 @@ function parseArguments (argvs) {
amount: 'a',
json: 'j',
renderLatencyTable: ['l', 'latency'],
onPort: 'on-port',
method: 'm',
headers: ['H', 'header'],
body: 'b',
@@ -65,11 +71,16 @@ function parseArguments (argvs) {
method: 'GET',
idReplacement: false,
excludeErrorStats: false
}
},
'--': true
})

argv.url = argv._[0]

if (argv.onPort) {
argv.spawn = argv['--']
}

// support -n to disable the progress bar and results table
if (argv.n) {
argv.renderProgressBar = false
@@ -100,7 +111,12 @@ function parseArguments (argvs) {

// check that the URL is valid.
try {
new URL(argv.url) // eslint-disable-line no-new
// If --on-port is given, it's acceptable to not have a hostname
if (argv.onPort) {
new URL(argv.url, 'http://localhost') // eslint-disable-line no-new
} else {
new URL(argv.url) // eslint-disable-line no-new
}
} catch (err) {
console.error(err.message)
console.error('')
@@ -139,9 +155,69 @@ function start (argv) {
return
}

if (argv.onPort) {
if (!hasAsyncHooks()) {
console.error('The --on-port flag requires the async_hooks builtin module, but it is not available. Please upgrade to Node 8.1+.')
process.exit(1)
}

const { socketPath, server } = createChannel((port) => {
const url = new URL(argv.url, `http://localhost:${port}`).href
const opts = Object.assign({}, argv, {
onPort: false,
url: url
})
runTracker(opts, () => {
proc.kill('SIGINT')
server.close()
})
})

// manage-path always uses the $PATH variable, but we can pretend
// that it is equal to $NODE_PATH
const alterPath = managePath({ PATH: process.env.NODE_PATH })
alterPath.unshift(path.join(__dirname, 'lib/preload'))

const proc = spawn(argv.spawn[0], argv.spawn.slice(1), {
stdio: ['ignore', 'inherit', 'inherit'],
env: Object.assign({}, process.env, {
NODE_OPTIONS: ['-r', 'autocannonDetectPort'].join(' ') +
(process.env.NODE_OPTIONS ? ` ${process.env.NODE_OPTIONS}` : ''),
NODE_PATH: alterPath.get(),
AUTOCANNON_SOCKET: socketPath
})
})
} else {
runTracker(argv)
}
}

function createChannel (onport) {
const pipeName = `${process.pid}.autocannon`
const socketPath = process.platform === 'win32'
? `\\\\?\\pipe\\${pipeName}`
: path.join(os.tmpdir(), pipeName)
const server = net.createServer((socket) => {
socket.once('data', (chunk) => {
const port = chunk.toString()
onport(port)
})
})
server.listen(socketPath)
server.on('close', () => {
try {
fs.unlinkSync(socketPath)
} catch (err) {}
})

return { socketPath, server }
}

function runTracker (argv, ondone) {
const tracker = run(argv)

tracker.on('done', (result) => {
if (ondone) ondone()
if (argv.json) {
console.log(JSON.stringify(result))
}
4 changes: 4 additions & 0 deletions help.txt
@@ -15,6 +15,10 @@ Available options:
The number of requests to make before exiting the benchmark. If set, duration is ignored.
-S/--socketPath
A path to a Unix Domain Socket or a Windows Named Pipe. A URL is still required in order to send the correct Host header and path.
--on-port
Start the command listed after -- on the command line. When it starts listening on a port,
start sending requests to that port. A URL is still required in order to send requests to
the correct path. The hostname can be omitted; `localhost` is used by default.
-m/--method METHOD
The HTTP method to use. Default: 'GET'.
-t/--timeout NUM
14 changes: 14 additions & 0 deletions lib/preload/autocannonDetectPort.js
@@ -0,0 +1,14 @@
'use strict'

const onListen = require('on-net-listen')
const net = require('net')

// connect to the IPC socket/pipe whose path the parent autocannon process passes via AUTOCANNON_SOCKET
const socket = net.connect(process.env.AUTOCANNON_SOCKET)

// fires when a server in this process starts listening: send its port to the parent and stop watching
onListen(function (addr) {
this.destroy()
const port = Buffer.from(addr.port + '')
socket.write(port)
})

socket.unref()
59 changes: 49 additions & 10 deletions lib/progressTracker.js
@@ -75,27 +75,31 @@ function track (instance, opts) {
// if the user doesn't want to render the table, we can just return early
if (!opts.renderResultsTable) return

const out = table([
asColor(chalk.cyan, ['Stat', 'Avg', 'Stdev', 'Max']),
asRow(chalk.bold('Latency (ms)'), result.latency),
asRow(chalk.bold('Req/Sec'), result.requests),
asRow(chalk.bold('Bytes/Sec'), asBytes(result.throughput))
], {
const tableOpts = {
border: getBorderCharacters('void'),
columnDefault: {
paddingLeft: 0,
paddingRight: 1
},
drawHorizontalLine: () => false
})
}

logToStream(out)
logToStream(table([
asColor(chalk.cyan, ['Stat', '2.5%', '50%', '97.5%', '99%', 'Avg', 'Stdev', 'Max']),
asLowRow(chalk.bold('Latency'), asMs(result.latency))
], tableOpts))
logToStream(table([
asColor(chalk.cyan, ['Stat', '1%', '2.5%', '50%', '97.5%', 'Avg', 'Stdev', 'Min']),
asHighRow(chalk.bold('Req/Sec'), result.requests),
asHighRow(chalk.bold('Bytes/Sec'), asBytes(result.throughput))
], tableOpts))
logToStream('Req/Bytes counts sampled once per second.\n')

if (opts.renderLatencyTable) {
const latency = table([
asColor(chalk.cyan, ['Percentile', 'Latency (ms)'])
].concat(percentiles.map((perc) => {
const key = ('p' + perc).replace('.', '')
const key = `p${perc}`.replace('.', '_')
return [
chalk.bold('' + perc),
result.latency[key]
@@ -160,21 +164,56 @@ function trackAmount (instance, opts, iOpts) {
return progressBar
}

function asRow (name, stat) {
// create a table row for stats where low values are better
function asLowRow (name, stat) {
return [
name,
stat.p2_5,
stat.p50,
stat.p97_5,
stat.p99,
stat.average,
stat.stddev,
typeof stat.max === 'string' ? stat.max : Math.floor(stat.max * 100) / 100
]
}

// create a table row for stats where high values are better
function asHighRow (name, stat) {
return [
name,
stat.p1,
stat.p2_5,
stat.p50,
stat.p97_5,
stat.average,
stat.stddev,
typeof stat.min === 'string' ? stat.min : Math.floor(stat.min * 100) / 100
]
}

function asColor (colorise, row) {
return row.map((entry) => colorise(entry))
}

function asMs (stat) {
const result = Object.create(null)
Object.keys(stat).forEach((k) => {
result[k] = `${stat[k]} ms`
})
result.max = typeof stat.max === 'string' ? stat.max : `${Math.floor(stat.max * 100) / 100} ms`

return result
}

function asBytes (stat) {
const result = Object.create(stat)

percentiles.forEach((p) => {
const key = `p${p}`.replace('.', '_')
result[key] = prettyBytes(stat[key])
})

result.average = prettyBytes(stat.average)
result.stddev = prettyBytes(stat.stddev)
result.max = prettyBytes(stat.max)
4 changes: 2 additions & 2 deletions lib/run.js
@@ -130,9 +130,9 @@ function run (opts, cb) {
title: opts.title,
url: opts.url,
socketPath: opts.socketPath,
requests: histAsObj(requests, totalCompletedRequests),
requests: addPercentiles(requests, histAsObj(requests, totalCompletedRequests)),
latency: addPercentiles(latencies, histAsObj(latencies)),
throughput: histAsObj(throughput, totalBytes),
throughput: addPercentiles(throughput, histAsObj(throughput, totalBytes)),
errors: errors,
timeouts: timeouts,
duration: Math.round((Date.now() - startTime) / 1000),
7 changes: 5 additions & 2 deletions package.json
@@ -7,7 +7,7 @@
"autocannon": "autocannon.js"
},
"scripts": {
"test": "standard && tap test/*.test.js"
"test": "standard && tap --timeout 45 test/*.test.js"
},
"pre-commit": [
"test"
@@ -45,11 +45,14 @@
"dependencies": {
"chalk": "^2.4.1",
"color-support": "^1.1.1",
"has-async-hooks": "^1.0.0",
"hdr-histogram-js": "^1.1.4",
"hdr-histogram-percentiles-obj": "^1.2.0",
"hdr-histogram-percentiles-obj": "^2.0.0",
"http-parser-js": "^0.4.13",
"hyperid": "^1.4.1",
"manage-path": "^2.0.0",
"minimist": "^1.2.0",
"on-net-listen": "^1.1.1",
"pretty-bytes": "^5.1.0",
"progress": "^2.0.0",
"reinterval": "^1.1.0",
8 changes: 6 additions & 2 deletions test/cli-ipc.test.js
@@ -13,11 +13,15 @@ const lines = [
/Running 1s test @ http:\/\/example.com\/foo \([^)]*\)$/,
/10 connections.*$/,
/$/,
/Stat.*Avg.*Stdev.*Max.*$/,
/Latency \(ms\).*$/,
/Stat.*2\.5%.*50%.*97\.5%.*99%.*Avg.*Stdev.*Max.*$/,
/Latency.*$/,
/$/,
/Stat.*1%.*2\.5%.*50%.*97\.5%.*Avg.*Stdev.*Min.*$/,
/Req\/Sec.*$/,
/Bytes\/Sec.*$/,
/$/,
/Req\/Bytes counts sampled once per second.*$/,
/$/,
/.* requests in \d+s, .* read/
]

8 changes: 6 additions & 2 deletions test/cli.test.js
@@ -10,11 +10,15 @@ const lines = [
/Running 1s test @ .*$/,
/10 connections.*$/,
/$/,
/Stat.*Avg.*Stdev.*Max.*$/,
/Latency \(ms\).*$/,
/Stat.*2\.5%.*50%.*97\.5%.*99%.*Avg.*Stdev.*Max.*$/,
/Latency.*$/,
/$/,
/Stat.*1%.*2\.5%.*50%.*97\.5%.*Avg.*Stdev.*Min.*$/,
/Req\/Sec.*$/,
/Bytes\/Sec.*$/,
/$/,
/Req\/Bytes counts sampled once per second.*$/,
/$/,
/.* requests in \d+s, .* read/
]

8 changes: 6 additions & 2 deletions test/envPort.test.js
@@ -10,11 +10,15 @@ const lines = [
/Running 1s test @ .*$/,
/10 connections.*$/,
/$/,
/Stat.*Avg.*Stdev.*Max.*$/,
/Latency \(ms\).*$/,
/Stat.*2\.5%.*50%.*97\.5%.*99%.*Avg.*Stdev.*Max.*$/,
/Latency.*$/,
/$/,
/Stat.*1%.*2\.5%.*50%.*97\.5%.*Avg.*Stdev.*Min.*$/,
/Req\/Sec.*$/,
/Bytes\/Sec.*$/,
/$/,
/Req\/Bytes counts sampled once per second.*$/,
/$/,
/.* requests in \d+s, .* read/
]
