1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2024-12-24 08:09:08 -05:00

First pass at http benchmark.

This commit is contained in:
Ryan Dahl 2018-10-15 16:44:35 -04:00
parent 62962e71fe
commit c61a0f2f84
11 changed files with 154 additions and 1 deletions

34
tests/http_bench.ts Normal file
View file

@ -0,0 +1,34 @@
// Used for benchmarking Deno's networking. See tools/http_benchmark.py
// TODO Replace this with a real HTTP server once
// https://github.com/denoland/deno/issues/726 is completed.
import * as deno from "deno";
// Listen address; can be overridden by the first CLI argument.
const addr = deno.args[1] || "127.0.0.1:4500";
const listener = deno.listen("tcp", addr);
// Pre-encoded canned HTTP response, written verbatim for every request.
// Content-Length: 12 matches the 12-byte body "Hello World\n".
const response = new TextEncoder().encode(
"HTTP/1.1 200 OK\r\nContent-Length: 12\r\n\r\nHello World\n"
);
/**
 * Serve a single connection: for each successful read, reply with the
 * canned HTTP response; stop at EOF. The socket is always closed via
 * `finally`, even when read/write throws.
 */
async function handle(conn: deno.Conn): Promise<void> {
  const buf = new Uint8Array(1024);
  try {
    for (;;) {
      const result = await conn.read(buf);
      if (result.eof) break;
      await conn.write(response);
    }
  } finally {
    conn.close();
  }
}
/**
 * Accept connections forever, serving each one concurrently.
 *
 * Fix: the original fired `handle(conn)` without handling rejection, so a
 * failing connection (e.g. a reset socket mid-write) produced an unhandled
 * promise rejection. Log it instead so the accept loop keeps running.
 */
async function main(): Promise<void> {
  console.log("Listening on", addr);
  while (true) {
    const conn = await listener.accept();
    handle(conn).catch((err) => console.error("conn error:", err));
  }
}
main();

@ -1 +1 @@
Subproject commit e1a853036189b8694dcc42db4d43433edb6c9036
Subproject commit 4f1056e8f0bb08f1f1add4f358431fce84ce35eb

View file

@ -14,6 +14,7 @@ from util import run, run_output, root_path, build_path, executable_suffix
import tempfile
import http_server
import throughput_benchmark
from http_benchmark import http_benchmark
# The list of the tuples of the benchmark name and arguments
exec_time_benchmarks = [
@ -183,6 +184,7 @@ def main(argv):
# pipe.
if os.name != 'nt':
new_data["throughput"] = run_throughput(deno_path)
new_data["req_per_sec"] = http_benchmark(deno_path)
if "linux" in sys.platform:
# Thread count test, only on linux
new_data["thread_count"] = run_thread_count_benchmark(deno_path)

49
tools/http_benchmark.py Executable file
View file

@ -0,0 +1,49 @@
#!/usr/bin/env python
import os
import sys
import util
import time
import subprocess
ADDR = "127.0.0.1:4544"
DURATION = "10s"
def http_benchmark(deno_exe):
deno_cmd = [deno_exe, "--allow-net", "tests/http_bench.ts", ADDR]
node_cmd = ["node", "tools/node_http.js", ADDR.split(":")[1]]
print "http_benchmark testing DENO."
deno_rps = run(deno_cmd)
print "http_benchmark testing NODE."
node_rps = run(node_cmd)
return {"deno": deno_rps, "node": node_rps}
def run(server_cmd):
# Run deno echo server in the background.
server = subprocess.Popen(server_cmd)
time.sleep(5) # wait for server to wake up. TODO racy.
wrk_platform = {
"linux2": "linux",
"darwin": "mac",
}[sys.platform]
try:
cmd = "third_party/wrk/" + wrk_platform + "/wrk -d " + DURATION + " http://" + ADDR + "/"
print cmd
output = subprocess.check_output(cmd, shell=True)
req_per_sec = util.parse_wrk_output(output)
print output
return req_per_sec
finally:
server.kill()
if __name__ == '__main__':
if len(sys.argv) < 2:
print "Usage ./tools/tcp_http_benchmark.py out/debug/deno"
sys.exit(1)
http_benchmark(sys.argv[1])

8
tools/node_http.js Normal file
View file

@ -0,0 +1,8 @@
// Minimal node baseline HTTP server; answers every request with a fixed
// body. Port comes from argv[2], defaulting to 4544.
const http = require("http");
const port = process.argv[2] || "4544";
console.log("port", port);
const server = http.createServer((req, res) => {
  res.end("Hello World\n");
});
server.listen(port);

9
tools/testdata/wrk1.txt vendored Normal file
View file

@ -0,0 +1,9 @@
Running 10s test @ http://127.0.0.1:4500/
2 threads and 10 connections
Thread Stats Avg Stdev Max +/- Stdev
Latency 5.08ms 1.37ms 34.96ms 96.63%
Req/Sec 0.92k 51.83 1.00k 78.50%
18381 requests in 10.00s, 0.89MB read
Socket errors: connect 0, read 18381, write 0, timeout 0
Requests/sec: 1837.86
Transfer/sec: 91.53KB

View file

@ -324,3 +324,11 @@ def extract_number(pattern, string):
if len(matches) != 1:
return None
return int(matches[0])
def parse_wrk_output(output):
    """Extract the integer part of the "Requests/sec:" figure from wrk output.

    Only the digits before any decimal point are captured (the regex stops at
    a non-digit), so "Requests/sec: 1837.86" yields 1837. Returns None when
    no Requests/sec line is present.
    """
    for line in output.split("\n"):
        req_per_sec = extract_number(r'Requests/sec:\s+(\d+)', line)
        if req_per_sec is not None:
            # Fix: return as soon as the figure is found instead of
            # continuing to scan every remaining line.
            return req_per_sec
    return None

View file

@ -78,11 +78,19 @@ def parse_unit_test_output_test():
assert expected == None
def parse_wrk_output_test():
print "Testing util.parse_wrk_output_test()..."
f = open(os.path.join(util.root_path, "tools/testdata/wrk1.txt"))
req_per_sec = util.parse_wrk_output(f.read())
assert req_per_sec == 1837
def util_test():
    # Run every util.py unit test, in the same order as before.
    tests = (
        pattern_match_test,
        parse_exit_code_test,
        shell_quote_win_test,
        parse_unit_test_output_test,
        parse_wrk_output_test,
    )
    for test in tests:
        test()
if __name__ == '__main__':

View file

@ -51,6 +51,10 @@ export function createThroughputColumns(data) {
return createColumns(data, "throughput");
}
// Build c3 column series for the requests-per-second benchmark data.
export function createReqPerSecColumns(data) {
  const field = "req_per_sec";
  return createColumns(data, field);
}
export function createBinarySizeColumns(data) {
const propName = "binary_size";
const binarySizeNames = Object.keys(data[data.length - 1][propName]);
@ -132,6 +136,7 @@ export async function main() {
const execTimeColumns = createExecTimeColumns(data);
const throughputColumns = createThroughputColumns(data);
const reqPerSecColumns = createReqPerSecColumns(data);
const binarySizeColumns = createBinarySizeColumns(data);
const threadCountColumns = createThreadCountColumns(data);
const syscallCountColumns = createSyscallCountColumns(data);
@ -188,6 +193,24 @@ export async function main() {
}
});
c3.generate({
  bindto: "#req-per-sec-chart",
  data: {
    columns: reqPerSecColumns,
    onclick: viewCommitOnClick(sha1List)
  },
  axis: {
    x: {
      type: "category",
      show: false,
      categories: sha1ShortList
    },
    y: {
      // Fix: label said "seconds" (copy-pasted from the exec-time chart),
      // but this chart plots wrk's requests-per-second figure.
      label: "req/sec"
    }
  }
});
c3.generate({
bindto: "#binary-size-chart",
data: {

View file

@ -28,6 +28,10 @@ const regularData = [
"10M_tcp": 1.6,
"10M_cat": 1.0
},
req_per_sec: {
node: 16000,
deno: 1000
},
benchmark: {
hello: {
mean: 0.05
@ -66,6 +70,10 @@ const regularData = [
"10M_tcp": 1.6,
"10M_cat": 1.0
},
req_per_sec: {
node: 1600,
deno: 3.0
},
benchmark: {
hello: {
mean: 0.055

View file

@ -29,6 +29,10 @@
<h2>Throughput</h2>
<div id="throughput-chart"></div>
<h2>Req/Sec</h2>
Tests HTTP server performance against Node.
<div id="req-per-sec-chart"></div>
<h2>Executable size</h2>
deno ships only a single binary. We track its size here.
<div id="binary-size-chart"></div>