Mirror of https://github.com/denoland/deno.git, synced 2024-12-22 07:14:47 -05:00
chore: remove op_baseline and flamebench (#21120)
ops are better tested in deno_core, and flamebench has rotted quite a bit.
Parent: f8d1d84c5a
Commit: 5e82fce0a0
4 changed files with 0 additions and 195 deletions
@@ -20,10 +20,6 @@ deno_core.workspace = true
 once_cell.workspace = true
 tokio.workspace = true
 
-[[bench]]
-name = "op_baseline"
-harness = false
-
 [[bench]]
 name = "utf8"
 harness = false
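For context on the manifest hunk above: a `[[bench]]` target with `harness = false` tells Cargo to skip the built-in libtest harness and run the bench file's own `main`, which is what lets these benches use the `bencher`-style macros re-exported by `deno_bench_util`. A minimal sketch of such a custom-harness bench, assuming the plain `bencher` crate rather than `deno_bench_util` (the file and function names here are illustrative, not from the repo):

```rust
// benches/noop.rs (hypothetical file name), paired with a manifest entry like:
//   [[bench]]
//   name = "noop"
//   harness = false
use bencher::{benchmark_group, benchmark_main, black_box, Bencher};

// Trivial benchmark body; `black_box` keeps the value from being optimized away.
fn bench_noop(b: &mut Bencher) {
  b.iter(|| black_box(314159));
}

benchmark_group!(benches, bench_noop);
// Expands to the `main` that Cargo invokes because `harness = false`.
benchmark_main!(benches);
```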
@@ -1,48 +0,0 @@
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-
-use deno_bench_util::bench_js_async;
-use deno_bench_util::bench_js_sync;
-use deno_bench_util::bench_or_profile;
-use deno_bench_util::bencher::benchmark_group;
-use deno_bench_util::bencher::Bencher;
-
-use deno_core::op2;
-use deno_core::Extension;
-
-deno_core::extension!(bench_setup, ops = [op_pi_json, op_pi_async, op_nop]);
-
-fn setup() -> Vec<Extension> {
-  vec![bench_setup::init_ops()]
-}
-
-#[op2(fast)]
-fn op_nop() {}
-
-#[op2(fast)]
-#[number]
-fn op_pi_json() -> i64 {
-  314159
-}
-
-// this is a function since async closures aren't stable
-#[op2(async)]
-#[number]
-async fn op_pi_async() -> i64 {
-  314159
-}
-
-fn bench_op_pi_json(b: &mut Bencher) {
-  bench_js_sync(b, r#"Deno.core.ops.op_pi_json();"#, setup);
-}
-
-fn bench_op_nop(b: &mut Bencher) {
-  bench_js_sync(b, r#"Deno.core.ops.op_nop();"#, setup);
-}
-
-fn bench_op_async(b: &mut Bencher) {
-  bench_js_async(b, r#"Deno.core.opAsync("op_pi_async");"#, setup);
-}
-
-benchmark_group!(benches, bench_op_pi_json, bench_op_nop, bench_op_async,);
-
-bench_or_profile!(benches);
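For readers unfamiliar with `deno_bench_util`, the helpers used in the removed file roughly amount to building a `JsRuntime` from the bench extensions and timing a JS snippet inside the bencher loop. A rough, hypothetical sketch of that idea follows; it is not the actual `bench_js_sync` implementation, and it assumes a `deno_core` version whose `execute_script` accepts a `&'static str` source:

```rust
use bencher::Bencher;
use deno_core::{Extension, JsRuntime, RuntimeOptions};

// Illustrative only: build one runtime with the bench extensions, then
// execute the snippet repeatedly under the bencher's timer.
fn bench_js_sync_sketch(
  b: &mut Bencher,
  src: &'static str,
  setup: fn() -> Vec<Extension>,
) {
  let mut runtime = JsRuntime::new(RuntimeOptions {
    extensions: setup(),
    ..Default::default()
  });
  b.iter(|| {
    // Assumes execute_script takes a &'static str name and source here.
    runtime.execute_script("<bench>", src).unwrap();
  });
}
```

The async variant also needs the runtime's event loop to be driven so the op's promise can resolve, which is what `bench_js_async` handles for `op_pi_async` in the removed file.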
@@ -31,35 +31,6 @@ executable
 cargo run -- run --allow-read --allow-write --allow-run --unstable ./tools/<script>
 ```
 
-## flamebench.js
-
-`flamebench.js` facilitates profiling and generating flamegraphs from
-benchmarks.
-
-General usage:
-
-```
-❯ ./tools/flamebench.js
-flamebench <bench_name> [bench_filter]
-
-Available benches:
-op_baseline
-ser
-de
-```
-
-To profile the `op_baseline` bench, run `./tools/flamebench.js op_baseline`;
-this will run all 3 benches in `op_baseline`.
-
-Often when profiling/optimizing, you'll want to focus on a specific sub-bench;
-`flamebench` supports a bench/test filter arg like the regular cargo commands.
-So you can simply run `./tools/flamebench.js op_baseline bench_op_async` or
-`./tools/flamebench.js op_baseline bench_op_nop` to profile specific benches.
-
-Tip: the `[bench_filter]` argument doesn't have to be an exact bench name; you
-can use a shorthand or a partial match to profile a group of benches, e.g.
-`./tools/flamebench.js de v8`.
-
 ## copyright_checker.js
 
 `copyright_checker.js` is used to check copyright headers in the codebase.
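With `flamebench.js` removed, a roughly comparable workflow is available from the tool the script shelled out to, cargo-flamegraph (https://github.com/flamegraph-rs/flamegraph): something like `cargo flamegraph --bench <bench_name>` profiles a bench target in place, with any bench filter forwarded to the bench binary after `--`; the exact flags depend on the installed cargo-flamegraph version.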
@@ -1,114 +0,0 @@
-#!/usr/bin/env -S deno run --unstable --allow-read --allow-run
-// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
-import { join, ROOT_PATH as ROOT } from "./util.js";
-
-const { 0: benchName, 1: benchFilter } = Deno.args;
-// Print usage if no bench specified
-if (!benchName) {
-  console.log("flamebench.js <bench_name> [bench_filter]");
-  // Also show available benches
-  console.log("\nAvailable benches:");
-  const benches = await availableBenches();
-  console.log(benches.join("\n"));
-  return Deno.exit(1);
-}
-
-// List available benches, hoping we don't have any benches called "ls" :D
-if (benchName === "ls") {
-  const benches = await availableBenches();
-  console.log(benches.join("\n"));
-  return;
-}
-
-// Ensure flamegraph is installed
-if (!await binExists("flamegraph")) {
-  console.log(
-    "flamegraph (https://github.com/flamegraph-rs/flamegraph) not found, please run:",
-  );
-  console.log();
-  console.log("cargo install flamegraph");
-  return Deno.exit(1);
-}
-
-// Build bench with frame pointers
-await bashThrough(
-  `RUSTFLAGS='-C force-frame-pointers=y' cargo build --release --bench ${benchName}`,
-);
-
-// Get the freshly built bench binary
-const benchBin = await latestBenchBin(benchName);
-
-// Run flamegraph
-const outputFile = join(ROOT, "flamebench.svg");
-await runFlamegraph(benchBin, benchFilter, outputFile);
-
-// Open flamegraph (in your browser / SVG viewer)
-if (await binExists("open")) {
-  await bashThrough(`open ${outputFile}`);
-}
-
-async function availableBenches() {
-  // TODO(AaronO): maybe reimplement with fs.walk
-  // it's important to prune the walked tree so this is fast (<50ms)
-  const prunedDirs = ["third_party", ".git", "target", "docs", "test_util"];
-  const pruneExpr = prunedDirs.map((d) => `-path ${ROOT}/${d}`).join(" -o ");
-  return (await bashOut(`
-find ${ROOT} -type d \
-\\( ${pruneExpr} \\) \
--prune -false -o \
--path "${ROOT}/*/benches/*" -type f -name "*.rs" \
-| xargs basename | cut -f1 -d.
-`)).split("\n");
-}
-
-function latestBenchBin(name) {
-  return bashOut(`ls -t "${ROOT}/target/release/deps/${name}"* | head -n 1`);
-}
-
-function runFlamegraph(benchBin, benchFilter, outputFile) {
-  return bashThrough(
-    `sudo -E flamegraph -o ${outputFile} ${benchBin} ${benchFilter ?? ""}`,
-    // Set $PROFILING env so benches can improve their flamegraphs
-    { env: { "PROFILING": "1" } },
-  );
-}
-
-async function bashOut(subcmd) {
-  const { success, stdout } = await new Deno.Command("bash", {
-    args: ["-c", subcmd],
-    stdout: "piped",
-    stderr: "null",
-  }).output();
-
-  // Check for failure
-  if (!success) {
-    throw new Error("subcmd failed");
-  }
-  // Gather output
-  const output = new TextDecoder().decode(stdout);
-
-  return output.trim();
-}
-
-async function bashThrough(subcmd, opts = {}) {
-  const { success, code } = await new Deno.Command("bash", {
-    ...opts,
-    args: ["-c", subcmd],
-    stdout: "inherit",
-    stderr: "inherit",
-  }).output();
-
-  // Exit process on failure
-  if (!success) {
-    Deno.exit(code);
-  }
-}
-
-async function binExists(bin) {
-  try {
-    await bashOut(`which ${bin}`);
-    return true;
-  } catch (_) {
-    return false;
-  }
-}