
add tcp proxy benchmarks + split out website section for proxy req/s (#2464)

Kurt Mackey 2019-06-06 21:46:18 -05:00 committed by Ryan Dahl
parent 9bea576f3e
commit 9a6cfd653d
5 changed files with 252 additions and 32 deletions

tools/deno_tcp_proxy.ts (new file, 29 lines added)

@@ -0,0 +1,29 @@
// Used for benchmarking Deno's tcp proxy performance. See tools/http_benchmark.py
const addr = Deno.args[1] || "127.0.0.1:4500";
const originAddr = Deno.args[2] || "127.0.0.1:4501";
const listener = Deno.listen("tcp", addr);
async function handle(conn: Deno.Conn): Promise<void> {
const origin = await Deno.dial("tcp", originAddr);
try {
await Promise.all([Deno.copy(conn, origin), Deno.copy(origin, conn)]);
} catch (err) {
if (err.message !== "read error" && err.message !== "write error") {
throw err;
}
} finally {
conn.close();
origin.close();
}
}
async function main(): Promise<void> {
console.log("Listening on", addr);
while (true) {
const conn = await listener.accept();
handle(conn);
}
}
main();
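
For reference, the same bidirectional relay sketched against the current Deno networking API, where Deno.connect and stream piping have since replaced Deno.dial and Deno.copy. The addresses are just the defaults above; this is a sketch, not part of the commit:

// Sketch only: a rough modern-Deno equivalent of the proxy above.
const listener = Deno.listen({ hostname: "127.0.0.1", port: 4500 });
async function proxy(conn: Deno.Conn): Promise<void> {
  const origin = await Deno.connect({ hostname: "127.0.0.1", port: 4501 });
  try {
    // Each pipeTo resolves once its side of the connection closes.
    await Promise.all([
      conn.readable.pipeTo(origin.writable),
      origin.readable.pipeTo(conn.writable),
    ]);
  } catch {
    // Ignore errors from one peer closing while the other is mid-transfer,
    // mirroring the read/write error handling in the original.
  }
}
for await (const conn of listener) {
  proxy(conn);
}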

tools/http_benchmark.py

@@ -37,6 +37,18 @@ def deno_http(deno_exe):
})
def deno_tcp_proxy(deno_exe, hyper_hello_exe):
deno_cmd = [
deno_exe, "run", "--allow-net", "tools/deno_tcp_proxy.ts", ADDR,
ORIGIN_ADDR
]
print "http_proxy_benchmark testing DENO using net/tcp."
return run(
deno_cmd,
merge_env={"DENO_DIR": os.path.join(util.root_path, "js")},
origin_cmd=http_proxy_origin(hyper_hello_exe))
def deno_http_proxy(deno_exe, hyper_hello_exe):
deno_cmd = [
deno_exe, "run", "--allow-net", "tools/deno_http_proxy.ts", ADDR,
@@ -71,6 +83,16 @@ def node_http_proxy(hyper_hello_exe):
return run(node_cmd, None, http_proxy_origin(hyper_hello_exe))
def node_tcp_proxy(hyper_hello_exe):
node_cmd = [
"node", "tools/node_tcp_proxy.js",
ADDR.split(":")[1],
ORIGIN_ADDR.split(":")[1]
]
print "http_proxy_benchmark testing NODE tcp."
return run(node_cmd, None, http_proxy_origin(hyper_hello_exe))
def node_tcp():
node_cmd = ["node", "tools/node_tcp.js", ADDR.split(":")[1]]
print "http_benchmark testing node_tcp.js"
@@ -97,11 +119,13 @@ def http_benchmark(build_dir):
# "deno_http" was once called "deno_net_http"
"deno_http": deno_http(deno_exe),
"deno_proxy": deno_http_proxy(deno_exe, hyper_hello_exe),
"deno_proxy_tcp": deno_tcp_proxy(deno_exe, hyper_hello_exe),
"deno_core_single": deno_core_single(core_http_bench_exe),
"deno_core_multi": deno_core_multi(core_http_bench_exe),
# "node_http" was once called "node"
"node_http": node_http(),
"node_proxy": node_http_proxy(hyper_hello_exe),
"node_proxy_tcp": node_tcp_proxy(hyper_hello_exe),
"node_tcp": node_tcp(),
"hyper": hyper_http(hyper_hello_exe)
}
@@ -127,7 +151,7 @@ def run(server_cmd, merge_env=None, origin_cmd=None):
server = subprocess.Popen(server_cmd, env=env)
time.sleep(5) # wait for server to wake up. TODO racy.
time.sleep(15) # wait for server to wake up. TODO racy.
try:
cmd = "third_party/wrk/%s/wrk -d %s http://%s/" % (util.platform(),

tools/node_tcp_proxy.js (new file, 68 lines added)

@@ -0,0 +1,68 @@
const net = require("net");
process.on("uncaughtException", function(error) {
console.error(error);
});
if (process.argv.length != 4) {
console.log("usage: %s <localport> <remoteport>", process.argv[1]);
process.exit();
}
const localport = process.argv[2];
const remoteport = process.argv[3];
const remotehost = "127.0.0.1";
const server = net.createServer(function(localsocket) {
const remotesocket = new net.Socket();
remotesocket.connect(remoteport, remotehost);
localsocket.on("data", function(data) {
const flushed = remotesocket.write(data);
if (!flushed) {
localsocket.pause();
}
});
remotesocket.on("data", function(data) {
const flushed = localsocket.write(data);
if (!flushed) {
remotesocket.pause();
}
});
localsocket.on("drain", function() {
remotesocket.resume();
});
remotesocket.on("drain", function() {
localsocket.resume();
});
localsocket.on("close", function() {
remotesocket.end();
});
remotesocket.on("close", function() {
localsocket.end();
});
localsocket.on("error", function() {
localsocket.end();
});
remotesocket.on("error", function() {
remotesocket.end();
});
});
server.listen(localport);
console.log(
"redirecting connections from 127.0.0.1:%d to %s:%d",
localport,
remotehost,
remoteport
);
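
The data/drain handlers above implement backpressure by hand: a write that returns false pauses the opposite socket until its drain event fires. Node's built-in Socket.pipe() does that bookkeeping itself, so the relay portion could be sketched as below (illustrative ports, not part of the commit):

// Sketch only: the same relay via pipe(), which pauses the source socket
// automatically whenever the destination's write buffer is full.
import * as net from "node:net";
const server = net.createServer((localsocket) => {
  const remotesocket = net.connect(4501, "127.0.0.1");
  localsocket.pipe(remotesocket);
  remotesocket.pipe(localsocket);
  // pipe() does not propagate errors, so still tear down both sides on failure.
  localsocket.on("error", () => remotesocket.destroy());
  remotesocket.on("error", () => localsocket.destroy());
});
server.listen(4500);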

website/app.js

@@ -42,6 +42,10 @@ export function createThroughputColumns(data) {
return createColumns(data, "throughput");
}
export function createProxyColumns(data) {
return createColumns(data, "req_per_sec_proxy");
}
export function createReqPerSecColumns(data) {
return createColumns(data, "req_per_sec");
}
@@ -197,15 +201,41 @@ export function drawCharts(dataUrl) {
return drawChartsFromBenchmarkData(dataUrl);
}
const proxyFields = [
"req_per_sec"
//"max_latency"
];
function extractProxyFields(data) {
for (const row of data) {
for (const field of proxyFields) {
const d = row[field];
if (!d) continue;
const name = field + "_proxy";
const newField = {};
row[name] = newField;
for (const k of Object.getOwnPropertyNames(d)) {
if (k.includes("_proxy")) {
const v = d[k];
delete d[k];
newField[k] = v;
}
}
}
}
}
/**
* Draws the charts from the benchmark data stored in gh-pages branch.
*/
export async function drawChartsFromBenchmarkData(dataUrl) {
const data = await getJson(dataUrl);
// hack to extract proxy fields from req/s fields
extractProxyFields(data);
const execTimeColumns = createExecTimeColumns(data);
const throughputColumns = createThroughputColumns(data);
const reqPerSecColumns = createReqPerSecColumns(data);
const proxyColumns = createProxyColumns(data);
const maxLatencyColumns = createMaxLatencyColumns(data);
const maxMemoryColumns = createMaxMemoryColumns(data);
const binarySizeColumns = createBinarySizeColumns(data);
@@ -235,6 +265,7 @@ export async function drawChartsFromBenchmarkData(dataUrl) {
gen("#exec-time-chart", execTimeColumns, "seconds", logScale);
gen("#throughput-chart", throughputColumns, "seconds", logScale);
gen("#req-per-sec-chart", reqPerSecColumns, "1000 req/sec", formatReqSec);
gen("#proxy-req-per-sec-chart", proxyColumns, "req/sec");
gen("#max-latency-chart", maxLatencyColumns, "milliseconds", logScale);
gen("#max-memory-chart", maxMemoryColumns, "megabytes", formatMB);
gen("#binary-size-chart", binarySizeColumns, "megabytes", formatMB);

website/index.html

@@ -3,17 +3,18 @@
<html>
<head>
<title>Deno Benchmarks</title>
<link rel="shortcut icon" href="favicon.ico">
<link rel="shortcut icon" href="favicon.ico" />
<link rel="stylesheet" href="https://unpkg.com/c3@0.6.7/c3.min.css" />
<link rel="stylesheet" href="style.css" />
<meta content="width=device-width, initial-scale=1.0" name="viewport" />
</head>
<body>
<div id="spinner-overlay">
<div class="spinner"></div>`
<div class="spinner"></div>
`
</div>
<main>
<a href="/"><img src="images/deno_logo_4.gif" width=200></a>
<a href="/"><img src="images/deno_logo_4.gif" width="200"/></a>
<h1>Deno Continuous Benchmarks</h1>
<p>
@@ -21,7 +22,10 @@
<a href="https://github.com/denoland/deno">master branch</a>.
</p>
<p>Make sure your adblocker is disabled as some can block the chart rendering.</p>
<p>
Make sure your adblocker is disabled as some can block the chart
rendering.
</p>
<p><a href="#recent">recent data</a></p>
<p><a href="#all">all data</a> (takes a moment to load)</p>
@@ -37,41 +41,53 @@
<li>
<a
href="https://github.com/denoland/deno/blob/master/tools/deno_tcp.ts"
>deno_tcp</a>
>deno_tcp</a
>
is a fake http server that doesn't parse HTTP. It is comparable to
<a
href="https://github.com/denoland/deno/blob/master/tools/node_tcp.js"
>node_tcp</a>
>node_tcp</a
>
.
</li>
<li>
<a
href="https://github.com/denoland/deno_std/blob/master/http/http_bench.ts"
>deno_http</a>
>deno_http</a
>
is a web server written in TypeScript. It is comparable to
<a
href="https://github.com/denoland/deno/blob/master/tools/node_http.js"
>node_http</a>
>node_http</a
>
.
</li>
<li>deno_core_single and deno_core_multi are two versions of
a minimal fake HTTP server. It blindly reads and writes fixed HTTP
packets. It is comparable to deno_tcp and node_tcp.
This is a standalone executable that uses <a
href="https://crates.io/crates/deno">the deno rust crate</a>. The
<li>
deno_core_single and deno_core_multi are two versions of a minimal
fake HTTP server. It blindly reads and writes fixed HTTP packets. It
is comparable to deno_tcp and node_tcp. This is a standalone
executable that uses
<a href="https://crates.io/crates/deno">the deno rust crate</a>. The
code is in
<a
href="https://github.com/denoland/deno/blob/master/core/examples/http_bench.rs"
>http_bench.rs</a>
>http_bench.rs</a
>
and
<a
href="https://github.com/denoland/deno/blob/master/core/examples/http_bench.js"
>http_bench.js</a>. single uses <a
href="https://docs.rs/tokio/0.1.19/tokio/runtime/current_thread/index.html">tokio::runtime::current_thread</a>
and multi uses <a
href="https://docs.rs/tokio/0.1.19/tokio/runtime/">tokio::runtime::threadpool</a>.
>http_bench.js</a
>. single uses
<a
href="https://docs.rs/tokio/0.1.19/tokio/runtime/current_thread/index.html"
>tokio::runtime::current_thread</a
>
and multi uses
<a href="https://docs.rs/tokio/0.1.19/tokio/runtime/"
>tokio::runtime::threadpool</a
>.
</li>
<li>
@@ -86,10 +102,60 @@
<div id="req-per-sec-chart"></div>
<h3 id="proxy-req-per-sec">
Proxy Req/Sec <a href="#proxy-req-per-sec">#</a>
</h3>
<p>
Tests proxy performance. 10 keep-alive connections do as many
hello-world requests as possible. Bigger is better.
</p>
<ul>
<li>
<a
href="https://github.com/denoland/deno/blob/master/tools/deno_tcp_proxy.ts"
>deno_proxy_tcp</a
>
is a fake tcp proxy server that doesn't parse HTTP. It is comparable
to
<a
href="https://github.com/denoland/deno/blob/master/tools/node_tcp_proxy.js"
>node_proxy_tcp</a
>
.
</li>
<li>
<a
href="https://github.com/denoland/deno/blob/master/tools/deno_http_proxy.ts"
>deno_proxy</a
>
is an HTTP proxy server written in TypeScript. It is comparable to
<a
href="https://github.com/denoland/deno/blob/master/tools/node_http_proxy.js"
>node_proxy</a
>
.
</li>
<li>
<a
href="https://github.com/denoland/deno/blob/master/tools/hyper_hello.rs"
>
hyper
</a>
is a Rust HTTP server used as the origin for the proxy tests.
</li>
</ul>
<div id="proxy-req-per-sec-chart"></div>
<h3 id="max-latency">Max Latency <a href="#max-latency">#</a></h3>
<p>
Max latency during the same test used above for requests/second. Smaller is better.
Max latency during the same test used above for requests/second. Smaller
is better.
</p>
<div id="max-latency-chart"></div>
@@ -101,8 +167,8 @@
<a
href="https://github.com/denoland/deno/blob/master/tests/002_hello.ts"
>
tests/002_hello.ts
</a>,
tests/002_hello.ts </a
>,
<a
href="https://github.com/denoland/deno/blob/master/tests/003_relative_import.ts"
>tests/003_relative_import.ts</a
@@ -167,26 +233,28 @@
<script type="module">
import { drawCharts } from "./app.js";
window.chartWidth = 800;
const overlay = document.getElementById("spinner-overlay")
const overlay = document.getElementById("spinner-overlay");
function showSpinner () {
function showSpinner() {
overlay.style.display = "block";
}
function hideSpinner () {
function hideSpinner() {
overlay.style.display = "none";
}
function updateCharts () {
const u = window.location.hash.match("all") ? "./data.json" : "recent.json";
function updateCharts() {
const u = window.location.hash.match("all")
? "./data.json"
: "recent.json";
showSpinner()
showSpinner();
drawCharts(u).finally(hideSpinner)
drawCharts(u).finally(hideSpinner);
}
updateCharts()
updateCharts();
window.onhashchange = updateCharts
window.onhashchange = updateCharts;
</script>
</body>
</html>