
feat(bench): add BenchContext::start() and BenchContext::end() (#18734)

Closes #17589.
```ts
Deno.bench("foo", async (t) => {
  const resource = setup(); // not included in measurement
  t.start();
  measuredOperation(resource);
  t.end();
  resource.close(); // not included in measurement
});
```
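For reference, each iteration's reported time is the wall-clock time of the call minus the segments excluded by the explicit markers. A minimal sketch of the arithmetic applied in `benchMeasure()` below (the helper and its names are illustrative, not the actual implementation):
```ts
// Illustrative helper mirroring the per-iteration accounting in benchMeasure().
// t1/t2 bracket the whole call to the benchmarked function; explicitStart and
// explicitEnd are the optional timestamps recorded by BenchContext.start()/end().
function measuredTime(
  t1: number,
  t2: number,
  explicitStart: number | null,
  explicitEnd: number | null,
): number {
  let measured = t2 - t1; // total wall-clock time of the iteration
  if (explicitStart !== null) {
    measured -= explicitStart - t1; // drop the setup segment before start()
  }
  if (explicitEnd !== null) {
    measured -= t2 - explicitEnd; // drop the teardown segment after end()
  }
  return measured; // equals explicitEnd - explicitStart when both markers are used
}
```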
Nayeem Rahman 2023-07-31 11:02:59 +01:00 committed by GitHub
parent e348c11b64
commit 02865cb5a2
7 changed files with 258 additions and 40 deletions


@@ -574,6 +574,15 @@ function withPermissions(fn, permissions) {
/** @type {Map<number, TestState | TestStepState>} */
const testStates = new Map();
/** @type {number | null} */
let currentBenchId = null;
// These local variables are used to track time measurements at
// `BenchContext::{start,end}` calls. They are global instead of using a state
// map to minimise the overhead of assigning them.
/** @type {number | null} */
let currentBenchUserExplicitStart = null;
/** @type {number | null} */
let currentBenchUserExplicitEnd = null;
// Main test function provided by Deno.
function test(
@@ -825,10 +834,11 @@ function benchStats(n, highPrecision, avg, min, max, all) {
};
}
async function benchMeasure(timeBudget, fn, async) {
async function benchMeasure(timeBudget, fn, async, context) {
let n = 0;
let avg = 0;
let wavg = 0;
let usedExplicitTimers = false;
const all = [];
let min = Infinity;
let max = -Infinity;
@@ -842,61 +852,101 @@ async function benchMeasure(timeBudget, fn, async) {
if (!async) {
while (budget > 0 || iterations-- > 0) {
const t1 = benchNow();
fn();
const iterationTime = benchNow() - t1;
fn(context);
const t2 = benchNow();
const totalTime = t2 - t1;
let measuredTime = totalTime;
if (currentBenchUserExplicitStart !== null) {
measuredTime -= currentBenchUserExplicitStart - t1;
currentBenchUserExplicitStart = null;
usedExplicitTimers = true;
}
if (currentBenchUserExplicitEnd !== null) {
measuredTime -= t2 - currentBenchUserExplicitEnd;
currentBenchUserExplicitEnd = null;
usedExplicitTimers = true;
}
c++;
wavg += iterationTime;
budget -= iterationTime;
wavg += measuredTime;
budget -= totalTime;
}
} else {
while (budget > 0 || iterations-- > 0) {
const t1 = benchNow();
await fn();
const iterationTime = benchNow() - t1;
await fn(context);
const t2 = benchNow();
const totalTime = t2 - t1;
let measuredTime = totalTime;
if (currentBenchUserExplicitStart !== null) {
measuredTime -= currentBenchUserExplicitStart - t1;
currentBenchUserExplicitStart = null;
usedExplicitTimers = true;
}
if (currentBenchUserExplicitEnd !== null) {
measuredTime -= t2 - currentBenchUserExplicitEnd;
currentBenchUserExplicitEnd = null;
usedExplicitTimers = true;
}
c++;
wavg += iterationTime;
budget -= iterationTime;
wavg += measuredTime;
budget -= totalTime;
}
}
wavg /= c;
// measure step
if (wavg > lowPrecisionThresholdInNs) {
if (wavg > lowPrecisionThresholdInNs || usedExplicitTimers) {
let iterations = 10;
let budget = timeBudget * 1e6;
if (!async) {
while (budget > 0 || iterations-- > 0) {
const t1 = benchNow();
fn();
const iterationTime = benchNow() - t1;
fn(context);
const t2 = benchNow();
const totalTime = t2 - t1;
let measuredTime = totalTime;
if (currentBenchUserExplicitStart !== null) {
measuredTime -= currentBenchUserExplicitStart - t1;
currentBenchUserExplicitStart = null;
}
if (currentBenchUserExplicitEnd !== null) {
measuredTime -= t2 - currentBenchUserExplicitEnd;
currentBenchUserExplicitEnd = null;
}
n++;
avg += iterationTime;
budget -= iterationTime;
ArrayPrototypePush(all, iterationTime);
if (iterationTime < min) min = iterationTime;
if (iterationTime > max) max = iterationTime;
avg += measuredTime;
budget -= totalTime;
ArrayPrototypePush(all, measuredTime);
if (measuredTime < min) min = measuredTime;
if (measuredTime > max) max = measuredTime;
}
} else {
while (budget > 0 || iterations-- > 0) {
const t1 = benchNow();
await fn();
const iterationTime = benchNow() - t1;
await fn(context);
const t2 = benchNow();
const totalTime = t2 - t1;
let measuredTime = totalTime;
if (currentBenchUserExplicitStart !== null) {
measuredTime -= currentBenchUserExplicitStart - t1;
currentBenchUserExplicitStart = null;
}
if (currentBenchUserExplicitEnd !== null) {
measuredTime -= t2 - currentBenchUserExplicitEnd;
currentBenchUserExplicitEnd = null;
}
n++;
avg += iterationTime;
budget -= iterationTime;
ArrayPrototypePush(all, iterationTime);
if (iterationTime < min) min = iterationTime;
if (iterationTime > max) max = iterationTime;
avg += measuredTime;
budget -= totalTime;
ArrayPrototypePush(all, measuredTime);
if (measuredTime < min) min = measuredTime;
if (measuredTime > max) max = measuredTime;
}
}
} else {
@@ -906,7 +956,11 @@ async function benchMeasure(timeBudget, fn, async) {
if (!async) {
while (budget > 0 || iterations-- > 0) {
const t1 = benchNow();
for (let c = 0; c < lowPrecisionThresholdInNs; c++) fn();
for (let c = 0; c < lowPrecisionThresholdInNs; c++) {
fn(context);
currentBenchUserExplicitStart = null;
currentBenchUserExplicitEnd = null;
}
const iterationTime = (benchNow() - t1) / lowPrecisionThresholdInNs;
n++;
@@ -919,7 +973,11 @@ async function benchMeasure(timeBudget, fn, async) {
} else {
while (budget > 0 || iterations-- > 0) {
const t1 = benchNow();
for (let c = 0; c < lowPrecisionThresholdInNs; c++) await fn();
for (let c = 0; c < lowPrecisionThresholdInNs; c++) {
await fn(context);
currentBenchUserExplicitStart = null;
currentBenchUserExplicitEnd = null;
}
const iterationTime = (benchNow() - t1) / lowPrecisionThresholdInNs;
n++;
@@ -936,12 +994,45 @@ async function benchMeasure(timeBudget, fn, async) {
return benchStats(n, wavg > lowPrecisionThresholdInNs, avg, min, max, all);
}
/** @param desc {BenchDescription} */
function createBenchContext(desc) {
return {
[SymbolToStringTag]: "BenchContext",
name: desc.name,
origin: desc.origin,
start() {
if (currentBenchId !== desc.id) {
throw new TypeError(
"The benchmark which this context belongs to is not being executed.",
);
}
if (currentBenchUserExplicitStart != null) {
throw new TypeError("BenchContext::start() has already been invoked.");
}
currentBenchUserExplicitStart = benchNow();
},
end() {
const end = benchNow();
if (currentBenchId !== desc.id) {
throw new TypeError(
"The benchmark which this context belongs to is not being executed.",
);
}
if (currentBenchUserExplicitEnd != null) {
throw new TypeError("BenchContext::end() has already been invoked.");
}
currentBenchUserExplicitEnd = end;
},
};
}
/** Wrap a user benchmark function in one which returns a structured result. */
function wrapBenchmark(desc) {
const fn = desc.fn;
return async function outerWrapped() {
let token = null;
const originalConsole = globalThis.console;
currentBenchId = desc.id;
try {
globalThis.console = new Console((s) => {
@@ -962,13 +1053,17 @@ function wrapBenchmark(desc) {
}
const benchTimeInMs = 500;
const stats = await benchMeasure(benchTimeInMs, fn, desc.async);
const context = createBenchContext(desc);
const stats = await benchMeasure(benchTimeInMs, fn, desc.async, context);
return { ok: stats };
} catch (error) {
return { failed: core.destructureError(error) };
} finally {
globalThis.console = originalConsole;
currentBenchId = null;
currentBenchUserExplicitStart = null;
currentBenchUserExplicitEnd = null;
if (bench.sanitizeExit) setExitHandler(null);
if (token !== null) restorePermissions(token);
}


@@ -181,6 +181,12 @@ itest!(check_local_by_default2 {
exit_code: 1,
});
itest!(bench_explicit_start_end {
args: "bench --quiet -A bench/explicit_start_and_end.ts",
output: "bench/explicit_start_and_end.out",
exit_code: 1,
});
itest!(bench_with_config {
args: "bench --config bench/collect/deno.jsonc bench/collect",
exit_code: 0,


@@ -5130,7 +5130,7 @@ fn lsp_completions_auto_import() {
"source": "./b.ts",
"data": {
"exportName": "foo",
"exportMapKey": "foo|6802|file:///a/b",
"exportMapKey": "foo|6811|file:///a/b",
"moduleSpecifier": "./b.ts",
"fileName": "file:///a/b.ts"
},


@@ -0,0 +1,25 @@
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/explicit_start_and_end.ts
benchmark time (avg) (min … max) p75 p99 p995
----------------------------------------------------- -----------------------------
start and end [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
start only [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
end only [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
double start error: TypeError: BenchContext::start() has already been invoked.
t.start();
^
at BenchContext.start ([WILDCARD])
at [WILDCARD]/explicit_start_and_end.ts:[WILDCARD]
double end error: TypeError: BenchContext::end() has already been invoked.
t.end();
^
at BenchContext.end ([WILDCARD])
at [WILDCARD]/explicit_start_and_end.ts:[WILDCARD]
captured error: TypeError: The benchmark which this context belongs to is not being executed.
captured!.start();
^
at BenchContext.start ([WILDCARD])
at [WILDCARD]/explicit_start_and_end.ts:[WILDCARD]
error: Bench failed


@@ -0,0 +1,50 @@
Deno.bench("start and end", (t) => {
const id = setInterval(() => {}, 1000);
t.start();
Deno.inspect(id);
t.end();
clearInterval(id);
});
Deno.bench("start only", (t) => {
const id = setInterval(() => {}, 1000);
t.start();
Deno.inspect(id);
clearInterval(id);
});
Deno.bench("end only", (t) => {
const id = setInterval(() => {}, 1000);
Deno.inspect(id);
t.end();
clearInterval(id);
});
Deno.bench("double start", (t) => {
const id = setInterval(() => {}, 1000);
t.start();
t.start();
Deno.inspect(id);
t.end();
clearInterval(id);
});
let captured: Deno.BenchContext;
Deno.bench("double end", (t) => {
captured = t;
const id = setInterval(() => {}, 1000);
t.start();
Deno.inspect(id);
t.end();
t.end();
clearInterval(id);
});
Deno.bench("captured", () => {
const id = setInterval(() => {}, 1000);
captured!.start();
Deno.inspect(id);
captured!.end();
clearInterval(id);
});


@@ -6,7 +6,7 @@ Deno.openKv;
'open' is declared here.
export function open(
~~~~
at asset:///lib.deno.ns.d.ts:1667:19
at asset:///lib.deno.ns.d.ts:[WILDCARD]:19
TS2339 [ERROR]: Property 'createHttpClient' does not exist on type 'typeof Deno'. 'Deno.createHttpClient' is an unstable API. Did you forget to run with the '--unstable' flag? If not, try changing the 'lib' compiler option to include 'deno.unstable' or add a triple-slash directive to your entrypoint: /// <reference lib="deno.unstable" />
Deno.createHttpClient;


@@ -995,6 +995,48 @@ declare namespace Deno {
fn: (t: TestContext) => void | Promise<void>,
): void;
/**
* Context that is passed to a benchmarked function. The instance is shared
* between iterations of the benchmark. Its methods can be used, for example,
* to override the measured portion of the function.
*
* @category Testing
*/
export interface BenchContext {
/** The current benchmark name. */
name: string;
/** The string URL of the current benchmark. */
origin: string;
/** Restarts the timer for the bench measurement. This should be called
* after doing setup work which should not be measured.
*
* ```ts
* Deno.bench("foo", async (t) => {
* const data = await Deno.readFile("data.txt");
* t.start();
* // some operation on `data`...
* });
* ```
*/
start(): void;
/** Ends the timer early for the bench measurement. This should be called
* before doing teardown work which should not be measured.
*
* ```ts
* Deno.bench("foo", async (t) => {
* const file = await Deno.open("data.txt");
* t.start();
* // some operation on `file`...
* t.end();
* file.close();
* });
* ```
*/
end(): void;
}
/**
* The interface for defining a benchmark test using {@linkcode Deno.bench}.
*
@@ -1002,7 +1044,7 @@
*/
export interface BenchDefinition {
/** The test function which will be benchmarked. */
fn: () => void | Promise<void>;
fn: (b: BenchContext) => void | Promise<void>;
/** The name of the test, which will be used in displaying the results. */
name: string;
/** If truthy, the benchmark test will be ignored/skipped. */
@@ -1073,7 +1115,7 @@ declare namespace Deno {
*
* @category Testing
*/
export function bench(t: BenchDefinition): void;
export function bench(b: BenchDefinition): void;
/**
* Register a benchmark test which will be run when `deno bench` is used on
@@ -1100,7 +1142,7 @@ declare namespace Deno {
*/
export function bench(
name: string,
fn: () => void | Promise<void>,
fn: (b: BenchContext) => void | Promise<void>,
): void;
/**
@@ -1126,7 +1168,7 @@ declare namespace Deno {
*
* @category Testing
*/
export function bench(fn: () => void | Promise<void>): void;
export function bench(fn: (b: BenchContext) => void | Promise<void>): void;
/**
* Register a benchmark test which will be run when `deno bench` is used on
@@ -1162,7 +1204,7 @@ declare namespace Deno {
export function bench(
name: string,
options: Omit<BenchDefinition, "fn" | "name">,
fn: () => void | Promise<void>,
fn: (b: BenchContext) => void | Promise<void>,
): void;
/**
@@ -1196,7 +1238,7 @@ declare namespace Deno {
*/
export function bench(
options: Omit<BenchDefinition, "fn">,
fn: () => void | Promise<void>,
fn: (b: BenchContext) => void | Promise<void>,
): void;
/**
@@ -1230,7 +1272,7 @@ declare namespace Deno {
*/
export function bench(
options: Omit<BenchDefinition, "fn" | "name">,
fn: () => void | Promise<void>,
fn: (b: BenchContext) => void | Promise<void>,
): void;
/** Exit the Deno process with optional exit code.