#!/usr/bin/env python
# Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
# Performs benchmarks and appends the data to //website/data.json.
# If //website/data.json doesn't exist, this script tries to import it from
# the gh-pages branch.
# To view the results locally, run ./tools/http_server.py and visit
# http://localhost:4545/website
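#
# Each run produces one record, written to <build_dir>/bench.json by main()
# below. A rough sketch of its shape (illustrative only; several keys are
# platform dependent):
#
#   {
#     "created_at": "<ISO-8601 UTC timestamp>",
#     "sha1": "<git commit hash>",
#     "benchmark": {"hello": {"mean": ..., "stddev": ..., ...}, ...},
#     "binary_size": {"deno": ..., "CLI_SNAPSHOT.bin": ..., ...},
#     "bundle_size": {"file_server": ..., "gist": ...},
#     "throughput": {...},     # skipped on Windows
#     "req_per_sec": {...},    # skipped on Windows
#     "max_latency": {...},    # skipped on Windows
#     "thread_count": {...},   # Linux only
#     "syscall_count": {...},  # Linux only
#     "max_memory": {...}      # Linux only
#   }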

import os
import sys
import json
import time
import tempfile
import subprocess

from util import build_path, executable_suffix, root_path, run, run_output
import third_party
from http_benchmark import http_benchmark
import throughput_benchmark
import http_server

# The list of (benchmark name, arguments, expected return code) tuples.
# A return code of None means the command is expected to exit successfully.
exec_time_benchmarks = [
    ("hello", ["run", "cli/tests/002_hello.ts"], None),
    ("relative_import", ["run", "cli/tests/003_relative_import.ts"], None),
    ("error_001", ["run", "cli/tests/error_001.ts"], 1),
    ("cold_hello", ["run", "--reload", "cli/tests/002_hello.ts"], None),
    ("cold_relative_import",
     ["run", "--reload", "cli/tests/003_relative_import.ts"], None),
    ("workers_startup",
     ["run", "--allow-read", "cli/tests/workers_startup_bench.ts"], None),
    ("workers_round_robin",
     ["run", "--allow-read", "cli/tests/workers_round_robin_bench.ts"], None),
    ("text_decoder", ["run", "cli/tests/text_decoder_perf.js"], None),
    ("text_encoder", ["run", "cli/tests/text_encoder_perf.js"], None),
]


def read_json(filename):
    with open(filename) as json_file:
        return json.load(json_file)


def write_json(filename, data):
    with open(filename, 'w') as outfile:
        json.dump(data, outfile)


def get_binary_sizes(build_dir):
    sizes = {}
    mtimes = {}
    # The deno executable should be located at the root of the build tree.
    deno_exe = os.path.join(build_dir, "deno" + executable_suffix)
    sizes["deno"] = os.path.getsize(deno_exe)
    # Because cargo's OUT_DIR is not predictable, search the build tree for
    # snapshot related files.
    for parent_dir, _, file_names in os.walk(build_dir):
        for file_name in file_names:
            if file_name not in [
                    "CLI_SNAPSHOT.bin",
                    "CLI_SNAPSHOT.js",
                    "CLI_SNAPSHOT.js.map",
                    "COMPILER_SNAPSHOT.bin",
                    "COMPILER_SNAPSHOT.js",
                    "COMPILER_SNAPSHOT.js.map",
            ]:
                continue
            file_path = os.path.join(parent_dir, file_name)
            file_mtime = os.path.getmtime(file_path)
            # If multiple copies of a file are found, use the most recent one.
            if file_name in mtimes and mtimes[file_name] > file_mtime:
                continue
            mtimes[file_name] = file_mtime
            sizes[file_name] = os.path.getsize(file_path)
    return sizes


def get_strace_summary_text(test_args):
    f = tempfile.NamedTemporaryFile()
    cmd = ["strace", "-c", "-f", "-o", f.name] + test_args
    try:
        subprocess.check_output(cmd)
    except subprocess.CalledProcessError:
        pass
    return f.read()


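# strace -c writes a summary table to the file passed with -o. The parser
# below assumes the usual layout (illustrative; column widths vary):
#
#   % time     seconds  usecs/call     calls    errors syscall
#   ------ ----------- ----------- --------- --------- ----------------
#    28.50    0.000123          12        10         2 futex
#    ...
#   ------ ----------- ----------- --------- --------- ----------------
#   100.00    0.000432                   522        36 total
#
# The header and separator rows are dropped (lines[2:-2]) and the final
# "total" row is parsed separately.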
def strace_parse(summary_text):
    summary = {}
    # clear empty lines
    lines = list(filter(lambda x: x and x != "\n", summary_text.split("\n")))
    # Filter out non-relevant lines. See the error log at
    # https://github.com/denoland/deno/pull/3715/checks?check_run_id=397365887
    # This is checked in tools/testdata/strace_summary2.out
    lines = [x for x in lines if x.find("detached ...") == -1]
    if len(lines) < 4:
        return {}  # malformed summary
    lines, total_line = lines[2:-2], lines[-1]
    # data to dict for each line
    for line in lines:
        syscall_fields = line.split()
        syscall_name = syscall_fields[-1]
        syscall_dict = {}
        if 5 <= len(syscall_fields) <= 6:
            syscall_dict = {
                "% time": float(syscall_fields[0]),
                "seconds": float(syscall_fields[1]),
                "usecs/call": int(syscall_fields[2]),
                "calls": int(syscall_fields[3])
            }
            syscall_dict["errors"] = 0 if len(syscall_fields) < 6 else int(
                syscall_fields[4])
        summary[syscall_name] = syscall_dict
    # record overall (total) data
    total_fields = total_line.split()
    summary["total"] = {
        "% time": float(total_fields[0]),
        "seconds": float(total_fields[1]),
        "calls": int(total_fields[2]),
        "errors": int(total_fields[3])
    }
    return summary


def get_strace_summary(test_args):
    s = get_strace_summary_text(test_args)
    try:
        return strace_parse(s)
    except ValueError:
        print("error parsing strace")
        print("----- <strace> -------")
        print(s)
        print("----- </strace> ------")


def run_throughput(deno_exe):
    m = {}
    m["100M_tcp"] = throughput_benchmark.tcp(deno_exe, 100)
    m["100M_cat"] = throughput_benchmark.cat(deno_exe, 100)
    m["10M_tcp"] = throughput_benchmark.tcp(deno_exe, 10)
    m["10M_cat"] = throughput_benchmark.cat(deno_exe, 10)
    return m
# "thread_count" and "syscall_count" are both calculated here.
|
|
|
|
def run_strace_benchmarks(deno_exe, new_data):
|
|
|
|
thread_count = {}
|
|
|
|
syscall_count = {}
|
2020-05-21 07:08:43 -04:00
|
|
|
for (name, args, _) in exec_time_benchmarks:
|
2020-01-16 15:32:25 -05:00
|
|
|
s = get_strace_summary([deno_exe] + args)
|
2019-04-17 15:47:07 -04:00
|
|
|
thread_count[name] = s["clone"]["calls"] + 1
|
|
|
|
syscall_count[name] = s["total"]["calls"]
|
|
|
|
new_data["thread_count"] = thread_count
|
|
|
|
new_data["syscall_count"] = syscall_count
|
2018-09-24 23:58:18 -04:00
|
|
|
|
|
|
|
|
2019-04-16 13:57:05 -04:00
|
|
|


# Takes the output of "/usr/bin/time -v", extracts the 'maximum resident set
# size', and returns it in bytes.
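# A matching line in that output looks roughly like this (the exact wording
# can vary between versions of /usr/bin/time):
#   Maximum resident set size (kbytes): 123456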
def find_max_mem_in_bytes(time_v_output):
    for line in time_v_output.split('\n'):
        if 'maximum resident set size (kbytes)' in line.lower():
            _, value = line.split(': ')
            return int(value) * 1024


def run_max_mem_benchmark(deno_exe):
    results = {}
    for (name, args, return_code) in exec_time_benchmarks:
        cmd = ["/usr/bin/time", "-v", deno_exe] + args
        try:
            out = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            if return_code == e.returncode:
                # The benchmark is expected to fail with this exit code; keep
                # the captured output so memory usage can still be read.
                out = e.output
            else:
                raise e
        mem = find_max_mem_in_bytes(out)
        results[name] = mem
    return results


def run_exec_time(deno_exe, build_dir):
    hyperfine_exe = third_party.get_prebuilt_tool_path("hyperfine")
    benchmark_file = os.path.join(build_dir, "hyperfine_results.json")

    def benchmark_command(deno_exe, args, return_code):
        # Append a bash test that asserts the exit code of the preceding
        # command ($? holds that exit code) whenever an expected return code
        # is given.
        return_code_test = "; test $? -eq {}".format(
            return_code) if return_code is not None else ""
        return "{} {}{}".format(deno_exe, " ".join(args), return_code_test)

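    # For example, ("error_001", ["run", "cli/tests/error_001.ts"], 1) turns
    # into roughly:
    #   <deno_exe> run cli/tests/error_001.ts; test $? -eq 1
    # so hyperfine times a command line that exits 0 only when deno exits
    # with the expected code.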
    run([hyperfine_exe, "--export-json", benchmark_file, "--warmup", "3"] + [
        benchmark_command(deno_exe, args, return_code)
        for (_, args, return_code) in exec_time_benchmarks
    ])

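    # hyperfine's --export-json file looks roughly like
    #   {"results": [{"command": ..., "mean": ..., "stddev": ..., "user": ...,
    #                 "system": ..., "min": ..., "max": ..., ...}, ...]}
    # with one entry per benchmarked command, in the order the commands were
    # given above, which is what the zip() below relies on.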
    hyperfine_results = read_json(benchmark_file)
    results = {}
    for [[name, _, _], data] in zip(exec_time_benchmarks,
                                    hyperfine_results["results"]):
        results[name] = {
            "mean": data["mean"],
            "stddev": data["stddev"],
            "user": data["user"],
            "system": data["system"],
            "min": data["min"],
            "max": data["max"]
        }
    return results


def run_http(build_dir, new_data):
    stats = http_benchmark(build_dir)
    new_data["req_per_sec"] = {k: v["req_per_sec"] for k, v in stats.items()}
    new_data["max_latency"] = {k: v["max_latency"] for k, v in stats.items()}


def bundle_benchmark(deno_exe):
    bundles = {
        "file_server": "./std/http/file_server.ts",
        "gist": "./std/examples/gist.ts",
    }

    sizes = {}

    for name, url in bundles.items():
        # bundle
        path = name + ".bundle.js"
        run([deno_exe, "bundle", "--unstable", url, path])
        # get size of bundle
        assert os.path.exists(path)
        sizes[name] = os.path.getsize(path)
        # remove bundle
        os.remove(path)

    return sizes


def main():
    build_dir = build_path()
    sha1 = run_output(["git", "rev-parse", "HEAD"],
                      exit_on_fail=True).out.strip()
    http_server.spawn()

    deno_exe = os.path.join(build_dir, "deno")

    os.chdir(root_path)

    new_data = {
        "created_at": time.strftime("%Y-%m-%dT%H:%M:%SZ"),
        "sha1": sha1,
    }

    # TODO(ry) The "benchmark" benchmark should actually be called "exec_time".
    # When this is changed, the historical data in gh-pages branch needs to be
    # changed too.
    new_data["benchmark"] = run_exec_time(deno_exe, build_dir)

    new_data["binary_size"] = get_binary_sizes(build_dir)
    new_data["bundle_size"] = bundle_benchmark(deno_exe)

    # Cannot run the throughput benchmark on Windows because it lacks nc or
    # pipe.
    if os.name != 'nt':
        new_data["throughput"] = run_throughput(deno_exe)
        run_http(build_dir, new_data)

    if "linux" in sys.platform:
        run_strace_benchmarks(deno_exe, new_data)
        new_data["max_memory"] = run_max_mem_benchmark(deno_exe)

    print("===== <BENCHMARK RESULTS>")
    print(json.dumps(new_data, indent=2))
    print("===== </BENCHMARK RESULTS>")

    write_json(os.path.join(build_dir, "bench.json"), new_data)


if __name__ == '__main__':
    main()