denoland/deno
benchmarks: fix max_memory and clean up (#2137)
Commit f20b05af24 (parent 8477daa8b9)
2 changed files with 72 additions and 59 deletions

tools/benchmark.py

@@ -125,13 +125,13 @@ def get_strace_summary(test_args):
     return strace_parse(get_strace_summary_text(test_args))


-def run_thread_count_benchmark(deno_path):
+def run_thread_count_benchmark(deno_exe):
     thread_count_map = {}
     thread_count_map["set_timeout"] = get_strace_summary([
-        deno_path, "--reload", "tests/004_set_timeout.ts"
+        deno_exe, "--reload", "tests/004_set_timeout.ts"
     ])["clone"]["calls"] + 1
     thread_count_map["fetch_deps"] = get_strace_summary([
-        deno_path, "--reload", "--allow-net", "tests/fetch_deps.ts"
+        deno_exe, "--reload", "--allow-net", "tests/fetch_deps.ts"
     ])["clone"]["calls"] + 1
     return thread_count_map
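Both thread-count figures come from an strace syscall summary: the number of clone(2) calls observed while the script runs, plus one for the main thread, which is not created via clone. Below is a minimal sketch of capturing such a summary, assuming strace's -c/-f summary mode; the real get_strace_summary_text() and strace_parse() helpers are defined earlier in tools/benchmark.py and are not part of this diff.

    import subprocess
    import tempfile

    def strace_summary_text(test_args):
        # -c prints a per-syscall call-count summary, -f follows forked
        # children and threads, -o redirects that summary table to a file.
        with tempfile.NamedTemporaryFile() as out:
            subprocess.check_call(["strace", "-c", "-f", "-o", out.name] +
                                  test_args)
            return out.read()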
@@ -145,12 +145,12 @@ def run_throughput(deno_exe):
     return m


-def run_syscall_count_benchmark(deno_path):
+def run_syscall_count_benchmark(deno_exe):
     syscall_count_map = {}
     syscall_count_map["hello"] = get_strace_summary(
-        [deno_path, "--reload", "tests/002_hello.ts"])["total"]["calls"]
+        [deno_exe, "--reload", "tests/002_hello.ts"])["total"]["calls"]
     syscall_count_map["fetch_deps"] = get_strace_summary(
-        [deno_path, "--reload", "--allow-net",
+        [deno_exe, "--reload", "--allow-net",
          "tests/fetch_deps.ts"])["total"]["calls"]
     return syscall_count_map
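The per-syscall entries and the aggregate "total" row are read from the same parsed strace summary. Purely as an illustration of the shape consumed above (the counts are placeholders, and the real parsed dict may carry more fields than "calls"):

    summary = {
        "clone": {"calls": 3},     # run_thread_count_benchmark adds 1 for the main thread
        "total": {"calls": 1234},  # read by run_syscall_count_benchmark
    }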
@@ -164,10 +164,47 @@ def find_max_mem_in_bytes(time_v_output):
             return int(value) * 1024


-def run_max_mem_benchmark(deno_path):
-    cmd = ["/usr/bin/time", "-v", deno_path, "--reload", "tests/002_hello.ts"]
-    out = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
-    return find_max_mem_in_bytes(out)
+def run_max_mem_benchmark(deno_exe):
+    results = {}
+    for (name, args) in exec_time_benchmarks:
+        cmd = ["/usr/bin/time", "-v", deno_exe] + args
+        try:
+            out = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+        except subprocess.CalledProcessError:
+            pass
+        mem = find_max_mem_in_bytes(out)
+        results[name] = mem
+    return results
+
+
+def run_exec_time(deno_exe, build_dir):
+    benchmark_file = os.path.join(build_dir, "hyperfine_results.json")
+    hyperfine = prebuilt.load_hyperfine()
+    run([
+        hyperfine, "--ignore-failure", "--export-json", benchmark_file,
+        "--warmup", "3"
+    ] + [
+        deno_exe + " " + " ".join(args) for [_, args] in exec_time_benchmarks
+    ])
+    hyperfine_results = read_json(benchmark_file)
+    results = {}
+    for [[name, _], data] in zip(exec_time_benchmarks,
+                                 hyperfine_results["results"]):
+        results[name] = {
+            "mean": data["mean"],
+            "stddev": data["stddev"],
+            "user": data["user"],
+            "system": data["system"],
+            "min": data["min"],
+            "max": data["max"]
+        }
+    return results
+
+
+def run_http(build_dir, new_data):
+    stats = http_benchmark(build_dir)
+    new_data["req_per_sec"] = {k: v["req_per_sec"] for k, v in stats.items()}
+    new_data["max_latency"] = {k: v["max_latency"] for k, v in stats.items()}


 def main(argv):
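run_max_mem_benchmark() now measures every entry in exec_time_benchmarks instead of only 002_hello.ts, relying on find_max_mem_in_bytes() to pull the maximum resident set size out of /usr/bin/time -v output (GNU time reports it in kbytes, hence the * 1024 above). One subtlety in the committed code: the except branch only swallows the failure, so out is unbound on a first-iteration failure or stale on later ones. A more defensive variant, offered only as a sketch and not part of this commit, would skip failed runs; here benchmarks stands in for the module-level exec_time_benchmarks list:

    import subprocess

    def max_rss_in_bytes(time_v_output):
        # GNU time -v prints "Maximum resident set size (kbytes): N".
        for line in time_v_output.splitlines():
            if "Maximum resident set size" in line:
                return int(line.split(":")[1]) * 1024
        return None

    def run_max_mem_benchmark(deno_exe, benchmarks):
        results = {}
        for name, args in benchmarks:
            cmd = ["/usr/bin/time", "-v", deno_exe] + args
            try:
                out = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                continue  # skip entries whose run failed
            results[name] = max_rss_in_bytes(out)
        return results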
@@ -179,68 +216,44 @@ def main(argv):
         print "Usage: tools/benchmark.py [build_dir]"
         sys.exit(1)

-    deno_path = os.path.join(build_dir, "deno")
-    benchmark_file = os.path.join(build_dir, "benchmark.json")
+    sha1 = run_output(["git", "rev-parse", "HEAD"]).strip()
+    http_server.spawn()
+
+    deno_exe = os.path.join(build_dir, "deno")

     os.chdir(root_path)
     import_data_from_gh_pages()

-    hyperfine = prebuilt.load_hyperfine()
-
-    run([
-        hyperfine, "--ignore-failure", "--export-json", benchmark_file,
-        "--warmup", "3"
-    ] + [
-        deno_path + " " + " ".join(args) for [_, args] in exec_time_benchmarks
-    ])
-    all_data = read_json(all_data_file)
-    benchmark_data = read_json(benchmark_file)
-    sha1 = run_output(["git", "rev-parse", "HEAD"]).strip()
     new_data = {
         "created_at": time.strftime("%Y-%m-%dT%H:%M:%SZ"),
         "sha1": sha1,
         "binary_size": {},
         "thread_count": {},
         "syscall_count": {},
         "benchmark": {}
     }
-    for [[name, _], data] in zip(exec_time_benchmarks,
-                                 benchmark_data["results"]):
-        new_data["benchmark"][name] = {
-            "mean": data["mean"],
-            "stddev": data["stddev"],
-            "user": data["user"],
-            "system": data["system"],
-            "min": data["min"],
-            "max": data["max"]
-        }
+
+    # TODO(ry) The "benchmark" benchmark should actually be called "exec_time".
+    # When this is changed, the historical data in gh-pages branch needs to be
+    # changed too.
+    new_data["benchmark"] = run_exec_time(deno_exe, build_dir)

     new_data["binary_size"] = get_binary_sizes(build_dir)

     # Cannot run throughput benchmark on windows because they don't have nc or
     # pipe.
     if os.name != 'nt':
-        hyper_hello_path = os.path.join(build_dir, "hyper_hello")
-        core_http_bench_exe = os.path.join(build_dir, "deno_core_http_bench")
-        new_data["throughput"] = run_throughput(deno_path)
-        stats = http_benchmark(deno_path, hyper_hello_path,
-                               core_http_bench_exe)
-        new_data["req_per_sec"] = {
-            k: v["req_per_sec"]
-            for k, v in stats.items()
-        }
-        new_data["max_latency"] = {
-            k: v["max_latency"]
-            for k, v in stats.items()
-        }
-    if "linux" in sys.platform:
-        # Thread count test, only on linux
-        new_data["thread_count"] = run_thread_count_benchmark(deno_path)
-        new_data["syscall_count"] = run_syscall_count_benchmark(deno_path)
-        new_data["max_memory"] = run_max_mem_benchmark(deno_path)
+        new_data["throughput"] = run_throughput(deno_exe)
+        run_http(build_dir, new_data)
+
+    if "linux" in sys.platform:
+        new_data["thread_count"] = run_thread_count_benchmark(deno_exe)
+        new_data["syscall_count"] = run_syscall_count_benchmark(deno_exe)
+        new_data["max_memory"] = run_max_mem_benchmark(deno_exe)

     print "===== <BENCHMARK RESULTS>"
     print json.dumps(new_data, indent=2)
     print "===== </BENCHMARK RESULTS>"

+    all_data = read_json(all_data_file)
     all_data.append(new_data)

     write_json(all_data_file, all_data)
     write_json(recent_data_file, all_data[-20:])
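Taken together, main() now assembles one record per run and appends it to the gh-pages data set. Purely as an illustration of that record's shape (the keys come from the diff above; the values below are placeholders, not real measurements):

    new_data = {
        "created_at": "1970-01-01T00:00:00Z",  # time.strftime(...)
        "sha1": "f20b05af24",                  # git rev-parse HEAD
        "binary_size": {},    # get_binary_sizes(build_dir)
        "benchmark": {},      # run_exec_time(): mean/stddev/user/system/min/max per test
        "throughput": {},     # run_throughput(), skipped on Windows
        "req_per_sec": {},    # filled in by run_http(), skipped on Windows
        "max_latency": {},    # filled in by run_http(), skipped on Windows
        "thread_count": {},   # Linux only
        "syscall_count": {},  # Linux only
        "max_memory": {},     # Linux only, from run_max_mem_benchmark()
    }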
tools/http_benchmark.py

@@ -58,10 +58,10 @@ def hyper_http_benchmark(hyper_hello_exe):
     return run(hyper_cmd)


-def http_benchmark(deno_exe, hyper_hello_exe, core_http_bench_exe):
+# TODO Rename to "deno_tcp"
+def http_benchmark(build_dir):
+    hyper_hello_exe = os.path.join(build_dir, "hyper_hello")
+    core_http_bench_exe = os.path.join(build_dir, "deno_core_http_bench")
+    deno_exe = os.path.join(build_dir, "deno")
     return {
         "deno": deno_http_benchmark(deno_exe),
         "deno_net_http": deno_net_http_benchmark(deno_exe),
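A small usage sketch of the new signature; the import path and build directory below are assumptions for illustration, and run_http() in tools/benchmark.py is the real caller:

    # Previously each executable had to be passed in:
    #     http_benchmark(deno_exe, hyper_hello_exe, core_http_bench_exe)
    from http_benchmark import http_benchmark  # assumed import path

    stats = http_benchmark("out/release")  # placeholder build dir
    for name, result in stats.items():
        # run_http() expects req_per_sec and max_latency on every entry.
        print(name, result["req_per_sec"], result["max_latency"])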