1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2024-11-29 16:30:56 -05:00
denoland-deno/tools/benchmark.py

233 lines
7.5 KiB
Python
Raw Normal View History

2018-09-21 12:06:59 -04:00
#!/usr/bin/env python
2019-01-01 19:58:40 -05:00
# Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
2018-09-21 12:06:59 -04:00
# Performs benchmark and append data to //website/data.json.
2018-11-30 03:27:41 -05:00
# If //website/data.json doesn't exist, this script tries to import it from
# gh-pages branch.
2018-09-21 12:06:59 -04:00
# To view the results locally run ./tools/http_server.py and visit
# http://localhost:4545/website
import os
import sys
import json
import time
import shutil
2018-09-25 20:08:09 -04:00
from util import run, run_output, root_path, build_path, executable_suffix
2018-09-24 18:12:52 -04:00
import tempfile
import http_server
2018-10-11 16:55:22 -04:00
import throughput_benchmark
2018-10-15 16:44:35 -04:00
from http_benchmark import http_benchmark
import prebuilt
2018-09-21 12:06:59 -04:00
# The list of the tuples of the benchmark name and arguments.
# NOTE: order matters -- main() zips this list against hyperfine's results,
# which are reported in the order the commands were passed.
exec_time_benchmarks = [
    ("hello", ["tests/002_hello.ts"]),
    ("relative_import", ["tests/003_relative_import.ts"]),
    ("error_001", ["tests/error_001.ts"]),
    # "cold" variants use --reload to bypass the module cache.
    ("cold_hello", ["--reload", "tests/002_hello.ts"]),
    ("cold_relative_import", ["--reload", "tests/003_relative_import.ts"]),
    ("workers_startup", ["tests/workers_startup_bench.ts"]),
    ("workers_round_robin", ["tests/workers_round_robin_bench.ts"]),
]

# Benchmark history seeded from the gh-pages branch (see
# import_data_from_gh_pages).
gh_pages_data_file = "gh-pages/data.json"
all_data_file = "website/data.json"  # Includes all benchmark data.
recent_data_file = "website/recent.json"  # Includes recent 20 benchmark data.
2018-09-21 12:06:59 -04:00
def read_json(filename):
    """Load and return the JSON document stored in *filename*."""
    with open(filename) as infile:
        return json.load(infile)
def write_json(filename, data):
    """Serialize *data* as JSON into *filename*, replacing any contents."""
    with open(filename, 'w') as sink:
        json.dump(data, sink)
def import_data_from_gh_pages():
    # Seed website/data.json with the published benchmark history from the
    # gh-pages branch so that new runs append to it rather than starting over.
    if os.path.exists(all_data_file):
        # History already present locally; nothing to import.
        return
    try:
        run([
            "git", "clone", "--depth", "1", "-b", "gh-pages",
            "https://github.com/denoland/deno.git", "gh-pages"
        ])
        shutil.copy(gh_pages_data_file, all_data_file)
    except ValueError:
        # NOTE(review): a failed clone or copy would more plausibly raise
        # CalledProcessError / OSError than ValueError -- confirm what
        # util.run raises; as written, most failures would propagate instead
        # of falling back to an empty history.
        write_json(all_data_file, [])  # writes empty json data
2018-09-21 12:06:59 -04:00
2018-09-25 20:08:09 -04:00
def get_binary_sizes(build_dir):
    """Return {artifact name: size in bytes} for the deno binary, the
    bundled JS (and source maps), and the V8 snapshots under *build_dir*.

    Asserts that every artifact exists, so a broken build fails loudly.
    """
    # Artifact name -> path relative to build_dir.
    artifacts = {
        "deno": "deno" + executable_suffix,
        "main.js": "gen/cli/bundle/main.js",
        "main.js.map": "gen/cli/bundle/main.js.map",
        "compiler.js": "gen/cli/bundle/compiler.js",
        "compiler.js.map": "gen/cli/bundle/compiler.js.map",
        "snapshot_deno.bin": "gen/cli/snapshot_deno.bin",
        "snapshot_compiler.bin": "gen/cli/snapshot_compiler.bin",
    }
    sizes = {}
    for name, rel_path in artifacts.items():
        full_path = os.path.join(build_dir, rel_path)
        assert os.path.exists(full_path)
        sizes[name] = os.path.getsize(full_path)
    return sizes
def get_strace_summary_text(test_args):
    """Run *test_args* under `strace -c -f` and return the raw summary text
    that strace writes to its output file."""
    with tempfile.NamedTemporaryFile() as summary_file:
        # strace writes the -c summary to the path given by -o; reading the
        # still-open temp file afterwards picks it up.
        run(["strace", "-c", "-f", "-o", summary_file.name] + test_args)
        return summary_file.read()
def strace_parse(summary_text):
    """Parse `strace -c` summary text into a dict keyed by syscall name,
    plus a "total" entry, each mapping to its summary columns."""
    # Drop empty lines so only header, separators, data rows and the
    # total row remain.
    rows = [ln for ln in summary_text.split("\n") if ln and ln != "\n"]
    if len(rows) < 4:
        # Too short to hold header + separator + data + total: malformed.
        return {}
    # Skip header and separator rows; the final row is the grand total.
    data_rows, total_row = rows[2:-2], rows[-1]
    summary = {}
    for row in data_rows:
        fields = row.split()
        name = fields[-1]
        entry = {}
        if 5 <= len(fields) <= 6:
            entry = {
                "% time": float(fields[0]),
                "seconds": float(fields[1]),
                "usecs/call": int(fields[2]),
                "calls": int(fields[3])
            }
            # The errors column is blank when a syscall never failed,
            # leaving only 5 fields on the row.
            entry["errors"] = int(fields[4]) if len(fields) >= 6 else 0
        summary[name] = entry
    totals = total_row.split()
    summary["total"] = {
        "% time": float(totals[0]),
        "seconds": float(totals[1]),
        "calls": int(totals[2]),
        "errors": int(totals[3])
    }
    return summary
def get_strace_summary(test_args):
    """Run *test_args* under strace and return the parsed summary dict."""
    raw_summary = get_strace_summary_text(test_args)
    return strace_parse(raw_summary)
2018-09-24 18:12:52 -04:00
def run_thread_count_benchmark(deno_path):
    """Measure how many threads representative scripts spawn (Linux only).

    Counts clone(2) calls reported by strace, plus one for the initial
    process/thread, which is not created via clone.
    """

    def count_threads(args):
        return get_strace_summary(args)["clone"]["calls"] + 1

    return {
        "set_timeout":
        count_threads([deno_path, "--reload", "tests/004_set_timeout.ts"]),
        "fetch_deps":
        count_threads(
            [deno_path, "--reload", "--allow-net", "tests/fetch_deps.ts"]),
    }
2018-10-11 16:55:22 -04:00
def run_throughput(deno_exe):
    """Run the TCP and cat throughput benchmarks at 100 MB and 10 MB."""
    results = {}
    for megs in (100, 10):
        results["%dM_tcp" % megs] = throughput_benchmark.tcp(deno_exe, megs)
        results["%dM_cat" % megs] = throughput_benchmark.cat(deno_exe, megs)
    return results
def run_syscall_count_benchmark(deno_path):
    """Total syscall counts for representative scripts (Linux only)."""
    counts = {}
    counts["hello"] = get_strace_summary(
        [deno_path, "--reload", "tests/002_hello.ts"])["total"]["calls"]
    counts["fetch_deps"] = get_strace_summary([
        deno_path, "--reload", "--allow-net", "tests/fetch_deps.ts"
    ])["total"]["calls"]
    return counts
2018-09-21 12:06:59 -04:00
def main(argv):
    """Run every benchmark and append the results to website/data.json.

    argv: full command line, e.g. sys.argv. An optional single argument is
    the build directory; otherwise build_path() is used. Exits with status 1
    on bad usage.
    """
    if len(argv) == 2:
        # Fix: read the argv parameter, not sys.argv directly -- the two only
        # coincided because the entry point happens to pass sys.argv.
        build_dir = argv[1]
    elif len(argv) == 1:
        build_dir = build_path()
    else:
        print("Usage: tools/benchmark.py [build_dir]")
        sys.exit(1)

    http_server.spawn()

    deno_path = os.path.join(build_dir, "deno")
    benchmark_file = os.path.join(build_dir, "benchmark.json")

    os.chdir(root_path)
    import_data_from_gh_pages()

    # Time each benchmark command with hyperfine; results land in
    # benchmark_file as JSON, in the same order as exec_time_benchmarks.
    hyperfine = prebuilt.load_hyperfine()
    run([
        hyperfine, "--ignore-failure", "--export-json", benchmark_file,
        "--warmup", "3"
    ] + [
        deno_path + " " + " ".join(args) for [_, args] in exec_time_benchmarks
    ])

    all_data = read_json(all_data_file)
    benchmark_data = read_json(benchmark_file)
    sha1 = run_output(["git", "rev-parse", "HEAD"]).strip()
    new_data = {
        "created_at": time.strftime("%Y-%m-%dT%H:%M:%SZ"),
        "sha1": sha1,
        "binary_size": {},
        "thread_count": {},
        "syscall_count": {},
        "benchmark": {}
    }
    # hyperfine reports results in command order, so zip pairs each result
    # with its benchmark name.
    for [[name, _], data] in zip(exec_time_benchmarks,
                                 benchmark_data["results"]):
        new_data["benchmark"][name] = {
            "mean": data["mean"],
            "stddev": data["stddev"],
            "user": data["user"],
            "system": data["system"],
            "min": data["min"],
            "max": data["max"]
        }

    new_data["binary_size"] = get_binary_sizes(build_dir)

    # Cannot run throughput benchmark on windows because they don't have nc
    # or pipe.
    if os.name != 'nt':
        hyper_hello_path = os.path.join(build_dir, "hyper_hello")
        core_http_bench_exe = os.path.join(build_dir, "deno_core_http_bench")
        new_data["throughput"] = run_throughput(deno_path)
        stats = http_benchmark(deno_path, hyper_hello_path,
                               core_http_bench_exe)
        new_data["req_per_sec"] = {
            k: v["req_per_sec"]
            for k, v in stats.items()
        }
        new_data["max_latency"] = {
            k: v["max_latency"]
            for k, v in stats.items()
        }

    if "linux" in sys.platform:
        # strace-based measurements only work on linux.
        new_data["thread_count"] = run_thread_count_benchmark(deno_path)
        new_data["syscall_count"] = run_syscall_count_benchmark(deno_path)

    all_data.append(new_data)
    write_json(all_data_file, all_data)
    # recent.json keeps only the last 20 runs for the website's quick view.
    write_json(recent_data_file, all_data[-20:])
2018-09-21 12:06:59 -04:00
# Script entry point: pass the raw command line through to main().
if __name__ == '__main__':
    main(sys.argv)