mirror of https://github.com/denoland/deno.git synced 2024-11-21 15:04:11 -05:00

chore: migrate bench, publish, and more itests to spec tests (#23584)

David Sherret 2024-04-29 10:08:27 -04:00 committed by GitHub
parent b02ffec37c
commit da52058a94
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
352 changed files with 1380 additions and 871 deletions
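For orientation: this migration removes Rust `itest!` macro declarations from the integration test modules and replaces them with data-driven spec tests, each described by a small JSONC config whose fields (`args`, `output`, `exitCode`, `steps`, ...) appear in the files added below. A minimal sketch, mirroring the `allow_all` bench test that shows up later in this diff (comments added here; the path is an assumption, since filenames are not visible in this rendering):

// assumed path: tests/specs/bench/allow_all/__test__.jsonc
{
  // command line passed to the deno binary under test
  "args": "bench --allow-all allow_all.ts",
  // file whose (wildcard-matched) contents the output must match
  "output": "allow_all.out",
  // expected process exit code
  "exitCode": 0
}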

Cargo.lock generated
View file

@ -2633,9 +2633,9 @@ checksum = "c007b1ae3abe1cb6f85a16305acd418b7ca6343b953633fee2b76d8f108b830f"
[[package]]
name = "file_test_runner"
version = "0.4.0"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b66e9ef00f9f6b82b030b7a9d659030f73498921d4c021b0772e75dfd7090d80"
checksum = "5f33b00489de0a5fd03df89aefe9fa55da5da3c1a207ea19cd381d1de7e6204b"
dependencies = [
"anyhow",
"crossbeam-channel",

View file

@ -43,7 +43,7 @@ deno_lockfile.workspace = true
deno_terminal.workspace = true
deno_tls.workspace = true
fastwebsockets = { workspace = true, features = ["upgrade", "unstable-split"] }
file_test_runner = "0.4.0"
file_test_runner = "0.4.1"
flaky_test = "=0.1.0"
http.workspace = true
http-body-util.workspace = true

View file

@ -3,223 +3,11 @@
use deno_core::serde_json::json;
use deno_core::url::Url;
use test_util as util;
use test_util::itest;
use test_util::itest_flaky;
use util::assert_contains;
use util::assert_not_contains;
use util::env_vars_for_npm_tests;
use util::TestContext;
use util::TestContextBuilder;
itest!(overloads {
args: "bench bench/overloads.ts",
exit_code: 0,
output: "bench/overloads.out",
});
itest!(meta {
args: "bench bench/meta.ts",
exit_code: 0,
output: "bench/meta.out",
});
itest!(pass {
args: "bench bench/pass.ts",
exit_code: 0,
output: "bench/pass.out",
});
itest!(ignore {
args: "bench bench/ignore.ts",
exit_code: 0,
output: "bench/ignore.out",
});
itest!(ignore_permissions {
args: "bench bench/ignore_permissions.ts",
exit_code: 0,
output: "bench/ignore_permissions.out",
});
itest!(fail {
args: "bench bench/fail.ts",
exit_code: 1,
output: "bench/fail.out",
});
itest!(bench_formatting {
args: "bench bench/bench_formatting.ts",
exit_code: 0,
output: "bench/bench_formatting.out",
});
itest!(collect {
args: "bench --ignore=bench/collect/ignore bench/collect",
exit_code: 0,
output: "bench/collect.out",
});
itest!(load_unload {
args: "bench bench/load_unload.ts",
exit_code: 0,
output: "bench/load_unload.out",
});
itest!(interval {
args: "bench bench/interval.ts",
exit_code: 0,
output: "bench/interval.out",
});
itest!(quiet {
args: "bench --quiet bench/quiet.ts",
exit_code: 0,
output: "bench/quiet.out",
});
itest!(only {
args: "bench bench/only.ts",
exit_code: 1,
output: "bench/only.out",
});
itest!(multifile_summary {
args: "bench bench/group_baseline.ts bench/pass.ts bench/multiple_group.ts",
exit_code: 0,
output: "bench/multifile_summary.out",
});
itest!(no_check {
args: "bench --no-check bench/no_check.ts",
exit_code: 1,
output: "bench/no_check.out",
});
itest!(allow_all {
args: "bench --allow-all bench/allow_all.ts",
exit_code: 0,
output: "bench/allow_all.out",
});
itest!(allow_none {
args: "bench bench/allow_none.ts",
exit_code: 1,
output: "bench/allow_none.out",
});
itest!(exit_sanitizer {
args: "bench bench/exit_sanitizer.ts",
output: "bench/exit_sanitizer.out",
exit_code: 1,
});
itest!(clear_timeout {
args: "bench bench/clear_timeout.ts",
exit_code: 0,
output: "bench/clear_timeout.out",
});
itest!(finally_timeout {
args: "bench bench/finally_timeout.ts",
exit_code: 1,
output: "bench/finally_timeout.out",
});
itest!(before_unload_prevent_default {
args: "bench --quiet bench/before_unload_prevent_default.ts",
output: "bench/before_unload_prevent_default.out",
});
itest!(group_baseline {
args: "bench bench/group_baseline.ts",
exit_code: 0,
output: "bench/group_baseline.out",
});
itest!(unresolved_promise {
args: "bench bench/unresolved_promise.ts",
exit_code: 1,
output: "bench/unresolved_promise.out",
});
itest!(unhandled_rejection {
args: "bench bench/unhandled_rejection.ts",
exit_code: 1,
output: "bench/unhandled_rejection.out",
});
itest!(filter {
args: "bench --filter=foo bench/filter",
exit_code: 0,
output: "bench/filter.out",
});
itest!(no_run {
args: "bench --no-run bench/no_run.ts",
output: "bench/no_run.out",
exit_code: 1,
});
itest!(no_prompt_by_default {
args: "bench --quiet bench/no_prompt_by_default.ts",
exit_code: 1,
output: "bench/no_prompt_by_default.out",
});
itest!(no_prompt_with_denied_perms {
args: "bench --quiet --allow-read bench/no_prompt_with_denied_perms.ts",
exit_code: 1,
output: "bench/no_prompt_with_denied_perms.out",
});
itest!(check_local_by_default {
args: "bench --quiet bench/check_local_by_default.ts",
output: "bench/check_local_by_default.out",
http_server: true,
});
itest!(check_local_by_default2 {
args: "bench --quiet bench/check_local_by_default2.ts",
output: "bench/check_local_by_default2.out",
http_server: true,
exit_code: 1,
});
itest!(bench_explicit_start_end {
args: "bench --quiet -A bench/explicit_start_and_end.ts",
output: "bench/explicit_start_and_end.out",
exit_code: 1,
});
itest_flaky!(bench_explicit_start_end_low_precision {
args: "bench --quiet -A bench/explicit_start_and_end_low_precision.ts",
output: "bench/explicit_start_and_end_low_precision.out",
});
itest!(bench_with_config {
args: "bench --config bench/collect/deno.jsonc bench/collect",
exit_code: 0,
output: "bench/collect.out",
});
itest!(bench_with_config2 {
args: "bench --config bench/collect/deno2.jsonc bench/collect",
exit_code: 0,
output: "bench/collect2.out",
});
itest!(bench_with_malformed_config {
args: "bench --config bench/collect/deno.malformed.jsonc",
exit_code: 1,
output: "bench/collect_with_malformed_config.out",
});
itest!(json_output {
args: "bench --json bench/pass.ts",
exit_code: 0,
output: "bench/pass.json.out",
});
#[test]
fn recursive_permissions_pledge() {
let context = TestContext::default();
@ -248,31 +36,6 @@ fn file_protocol() {
.assert_matches_file("bench/file_protocol.out");
}
itest!(package_json_basic {
args: "bench",
output: "package_json/basic/lib.bench.out",
envs: env_vars_for_npm_tests(),
http_server: true,
cwd: Some("package_json/basic"),
copy_temp_dir: Some("package_json/basic"),
exit_code: 0,
});
itest!(bench_lock {
args: "bench",
http_server: true,
cwd: Some("lockfile/basic"),
exit_code: 10,
output: "lockfile/basic/fail.out",
});
itest!(bench_no_lock {
args: "bench --no-lock",
http_server: true,
cwd: Some("lockfile/basic"),
output: "lockfile/basic/bench.nolock.out",
});
#[test]
fn conditionally_loads_type_graph() {
let context = TestContext::default();

View file

@ -1,53 +1,8 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use test_util::env_vars_for_npm_tests;
use test_util::itest;
use test_util::TestContext;
use test_util::TestContextBuilder;
itest!(_037_fetch_multiple {
args: "cache --reload --check=all run/fetch/test.ts run/fetch/other.ts",
http_server: true,
output: "cache/037_fetch_multiple.out",
});
itest!(_095_cache_with_bare_import {
args: "cache cache/095_cache_with_bare_import.ts",
output: "cache/095_cache_with_bare_import.ts.out",
exit_code: 1,
});
itest!(cache_extensionless {
args: "cache --reload --check=all http://localhost:4545/subdir/no_js_ext",
output: "cache/cache_extensionless.out",
http_server: true,
});
itest!(cache_random_extension {
args:
"cache --reload --check=all http://localhost:4545/subdir/no_js_ext@1.0.0",
output: "cache/cache_random_extension.out",
http_server: true,
});
itest!(performance_stats {
args: "cache --reload --check=all --log-level debug run/002_hello.ts",
output: "cache/performance_stats.out",
});
itest!(redirect_cache {
http_server: true,
args:
"cache --reload --check=all http://localhost:4548/subdir/redirects/a.ts",
output: "cache/redirect_cache.out",
});
itest!(ignore_require {
args: "cache --reload --no-check cache/ignore_require.js",
output_str: Some(""),
exit_code: 0,
});
// This test only runs on linux, because it hardcodes the XDG_CACHE_HOME env var
// which is only used on linux.
#[cfg(target_os = "linux")]
@ -71,33 +26,6 @@ fn xdg_cache_home_dir() {
assert!(xdg_cache_home.read_dir().count() > 0);
}
itest!(check_local_by_default {
args: "cache --quiet cache/check_local_by_default.ts",
output: "cache/check_local_by_default.out",
http_server: true,
});
itest!(check_local_by_default2 {
args: "cache --quiet cache/check_local_by_default2.ts",
output: "cache/check_local_by_default2.out",
http_server: true,
});
itest!(json_import {
// should not error
args: "cache --quiet cache/json_import/main.ts",
});
itest!(package_json_basic {
args: "cache main.ts",
output: "package_json/basic/main.cache.out",
envs: env_vars_for_npm_tests(),
http_server: true,
cwd: Some("package_json/basic"),
copy_temp_dir: Some("package_json/basic"),
exit_code: 0,
});
#[test]
fn cache_matching_package_json_dep_should_not_install_all() {
let context = TestContextBuilder::for_npm().use_temp_cwd().build();

View file

@ -6,24 +6,6 @@ use util::env_vars_for_npm_tests;
use util::TestContext;
use util::TestContextBuilder;
itest!(_095_check_with_bare_import {
args: "check cache/095_cache_with_bare_import.ts",
output: "cache/095_cache_with_bare_import.ts.out",
exit_code: 1,
});
itest!(check_extensionless {
args: "check --reload http://localhost:4545/subdir/no_js_ext",
output: "cache/cache_extensionless.out",
http_server: true,
});
itest!(check_random_extension {
args: "check --reload http://localhost:4545/subdir/no_js_ext@1.0.0",
output: "cache/cache_random_extension.out",
http_server: true,
});
itest!(check_all {
args: "check --quiet --all check/all/check_all.ts",
output: "check/all/check_all.out",
@ -145,12 +127,6 @@ itest!(check_imported_files_listed_in_exclude_option {
exit_code: 1,
});
itest!(check_with_excluded_file_specified {
args: "check lib/types.d.ts",
cwd: Some("check/excluded_file_specified/"),
output: "check/excluded_file_specified/check.out",
});
#[test]
fn cache_switching_config_then_no_config() {
let context = TestContext::default();
@ -279,13 +255,6 @@ itest!(check_dts {
exit_code: 1,
});
itest!(check_types_dts {
args: "check main.ts",
cwd: Some("check/types_dts/"),
output: "check/types_dts/main.out",
exit_code: 0,
});
itest!(package_json_basic {
args: "check main.ts",
output: "package_json/basic/main.check.out",

View file

@ -106,21 +106,6 @@ itest!(deno_doc_invalid_url {
exit_code: 1,
});
itest!(doc_lock {
args: "doc main.ts",
http_server: true,
cwd: Some("lockfile/basic"),
exit_code: 10,
output: "lockfile/basic/fail.out",
});
itest!(doc_no_lock {
args: "doc --no-lock main.ts",
http_server: true,
cwd: Some("lockfile/basic"),
output: "lockfile/basic/doc.nolock.out",
});
#[test]
fn deno_doc_html() {
let context = TestContext::default();

View file

@ -100,21 +100,6 @@ itest!(info_missing_module {
output: "info/info_missing_module.out",
});
itest!(info_lock {
args: "info main.ts",
http_server: true,
cwd: Some("lockfile/basic"),
exit_code: 10,
output: "lockfile/basic/fail.out",
});
itest!(info_no_lock {
args: "info --no-lock main.ts",
http_server: true,
cwd: Some("lockfile/basic"),
output: "lockfile/basic/info.nolock.out",
});
itest!(info_recursive_modules {
args: "info --quiet info/info_recursive_imports_test.ts",
output: "info/info_recursive_imports_test.out",

View file

@ -213,48 +213,6 @@ fn lint_with_glob_config_and_flags() {
assert_contains!(output, "Checked 2 files");
}
itest!(no_slow_types {
args: "lint",
output: "lint/no_slow_types/no_slow_types.out",
cwd: Some("lint/no_slow_types"),
exit_code: 1,
});
itest!(no_slow_types_entrypoint {
args: "lint a.ts",
output: "lint/no_slow_types/no_slow_types_entrypoint.out",
cwd: Some("lint/no_slow_types"),
exit_code: 1,
});
itest!(no_slow_types_non_entrypoint {
args: "lint d.ts",
output_str: Some("Checked 1 file\n"),
cwd: Some("lint/no_slow_types"),
exit_code: 0,
});
itest!(no_slow_types_excluded {
args: "lint --rules-exclude=no-slow-types",
output_str: Some("Checked 4 files\n"),
cwd: Some("lint/no_slow_types"),
exit_code: 0,
});
itest!(no_slow_types_non_package {
args: "lint --config=deno.non-package.json",
output_str: Some("Checked 4 files\n"),
cwd: Some("lint/no_slow_types"),
exit_code: 0,
});
itest!(no_slow_types_workspace {
args: "lint",
output: "lint/no_slow_types_workspace/output.out",
cwd: Some("lint/no_slow_types_workspace"),
exit_code: 1,
});
#[test]
fn opt_out_top_level_exclude_via_lint_unexclude() {
let context = TestContextBuilder::new().use_temp_cwd().build();

View file

@ -5,81 +5,11 @@ use std::process::Command;
use deno_core::serde_json::json;
use test_util::assert_contains;
use test_util::assert_not_contains;
use test_util::env_vars_for_jsr_npm_tests;
use test_util::env_vars_for_jsr_provenance_tests;
use test_util::env_vars_for_jsr_tests;
use test_util::env_vars_for_jsr_tests_with_git_check;
use test_util::env_vars_for_npm_tests;
use test_util::itest;
use test_util::TestContextBuilder;
itest!(no_token {
args: "publish",
cwd: Some("publish/missing_deno_json"),
output: "publish/no_token.out",
envs: env_vars_for_jsr_tests(),
exit_code: 1,
});
itest!(missing_deno_json {
args: "publish --token 'sadfasdf'",
output: "publish/missing_deno_json.out",
cwd: Some("publish/missing_deno_json"),
envs: env_vars_for_jsr_tests(),
exit_code: 1,
});
itest!(has_slow_types {
args: "publish --token 'sadfasdf'",
output: "publish/has_slow_types.out",
cwd: Some("publish/has_slow_types"),
envs: env_vars_for_jsr_tests(),
exit_code: 1,
});
itest!(allow_slow_types {
args: "publish --allow-slow-types --token 'sadfasdf'",
output: "publish/allow_slow_types.out",
cwd: Some("publish/has_slow_types"),
envs: env_vars_for_jsr_tests(),
http_server: true,
exit_code: 0,
});
itest!(invalid_path {
args: "publish --token 'sadfasdf'",
output: "publish/invalid_path.out",
cwd: Some("publish/invalid_path"),
envs: env_vars_for_jsr_tests(),
exit_code: 1,
});
itest!(symlink {
args: "publish --token 'sadfasdf' --dry-run",
output: "publish/symlink.out",
cwd: Some("publish/symlink"),
envs: env_vars_for_jsr_tests(),
exit_code: 0,
});
itest!(invalid_import {
args: "publish --token 'sadfasdf' --dry-run",
output: "publish/invalid_import.out",
cwd: Some("publish/invalid_import"),
envs: env_vars_for_jsr_npm_tests(),
exit_code: 1,
http_server: true,
});
itest!(invalid_import_esm_sh_suggestion {
args: "publish --token 'sadfasdf' --dry-run",
output: "publish/invalid_import_esm_sh_suggestion.out",
cwd: Some("publish/invalid_import_esm_sh_suggestion"),
envs: env_vars_for_jsr_npm_tests(),
exit_code: 1,
http_server: true,
});
#[test]
fn publish_non_exported_files_using_import_map() {
let context = publish_context_builder().build();
@ -139,191 +69,19 @@ fn publish_warning_not_in_graph() {
);
}
itest!(javascript_missing_decl_file {
args: "publish --token 'sadfasdf'",
output: "publish/javascript_missing_decl_file.out",
cwd: Some("publish/javascript_missing_decl_file"),
envs: env_vars_for_jsr_tests(),
exit_code: 0,
http_server: true,
});
itest!(unanalyzable_dynamic_import {
args: "publish --token 'sadfasdf'",
output: "publish/unanalyzable_dynamic_import.out",
cwd: Some("publish/unanalyzable_dynamic_import"),
envs: env_vars_for_jsr_tests(),
exit_code: 0,
http_server: true,
});
itest!(javascript_decl_file {
args: "publish --token 'sadfasdf'",
output: "publish/javascript_decl_file.out",
cwd: Some("publish/javascript_decl_file"),
envs: env_vars_for_jsr_tests(),
http_server: true,
exit_code: 0,
});
itest!(package_json {
args: "publish --token 'sadfasdf'",
output: "publish/package_json.out",
cwd: Some("publish/package_json"),
envs: env_vars_for_jsr_npm_tests(),
http_server: true,
});
itest!(successful {
args: "publish --token 'sadfasdf'",
output: "publish/successful.out",
cwd: Some("publish/successful"),
envs: env_vars_for_jsr_tests(),
http_server: true,
});
itest!(provenance {
args: "publish",
output: "publish/successful_provenance.out",
cwd: Some("publish/successful"),
envs: env_vars_for_jsr_provenance_tests(),
http_server: true,
});
itest!(no_check {
args: "publish --token 'sadfasdf' --no-check",
// still type checks the slow types output though
output: "publish/successful_no_check.out",
cwd: Some("publish/successful"),
envs: env_vars_for_jsr_tests(),
http_server: true,
});
itest!(node_specifier {
args: "publish --token 'sadfasdf'",
output: "publish/node_specifier.out",
cwd: Some("publish/node_specifier"),
envs: env_vars_for_jsr_tests()
.into_iter()
.chain(env_vars_for_npm_tests().into_iter())
.collect(),
http_server: true,
});
itest!(config_file_jsonc {
args: "publish --token 'sadfasdf'",
output: "publish/deno_jsonc.out",
cwd: Some("publish/deno_jsonc"),
envs: env_vars_for_jsr_tests(),
http_server: true,
});
itest!(workspace_all {
args: "publish --token 'sadfasdf'",
output: "publish/workspace.out",
cwd: Some("publish/workspace"),
envs: env_vars_for_jsr_tests(),
http_server: true,
});
itest!(workspace_individual {
args: "publish --token 'sadfasdf'",
output: "publish/workspace_individual.out",
cwd: Some("publish/workspace/bar"),
envs: env_vars_for_jsr_tests(),
http_server: true,
});
itest!(dry_run {
args: "publish --token 'sadfasdf' --dry-run",
cwd: Some("publish/successful"),
output: "publish/dry_run.out",
envs: env_vars_for_jsr_tests(),
http_server: true,
});
itest!(config_flag {
args: "publish --token 'sadfasdf' --config=successful/deno.json",
output: "publish/successful.out",
cwd: Some("publish"),
envs: env_vars_for_jsr_tests(),
http_server: true,
});
itest!(bare_node_builtins {
args: "publish --token 'sadfasdf' --dry-run --unstable-bare-node-builtins",
output: "publish/bare_node_builtins.out",
cwd: Some("publish/bare_node_builtins"),
envs: env_vars_for_jsr_npm_tests(),
http_server: true,
});
itest!(bare_node_builtins_warning_no_warnings {
args: "publish --token 'sadfasdf' --dry-run --unstable-bare-node-builtins",
output: "publish/bare_node_builtins_no_warnings.out",
cwd: Some("publish/bare_node_builtins"),
envs: env_vars_for_jsr_npm_tests()
.into_iter()
.chain(
vec![(
"DENO_DISABLE_PEDANTIC_NODE_WARNINGS".to_string(),
"1".to_string()
)]
.into_iter()
)
.collect(),
http_server: true,
});
itest!(sloppy_imports {
args: "publish --token 'sadfasdf' --dry-run --unstable-sloppy-imports",
output: "publish/sloppy_imports.out",
cwd: Some("publish/sloppy_imports"),
envs: env_vars_for_jsr_tests(),
http_server: true,
});
itest!(sloppy_imports_not_enabled {
args: "publish --token 'sadfasdf' --dry-run",
output: "publish/sloppy_imports_not_enabled.out",
cwd: Some("publish/sloppy_imports"),
envs: env_vars_for_jsr_tests(),
http_server: true,
exit_code: 1,
});
itest!(sloppy_imports_no_warnings {
args: "publish --token 'sadfasdf' --dry-run --unstable-sloppy-imports",
output: "publish/sloppy_imports_no_warnings.out",
cwd: Some("publish/sloppy_imports"),
envs: env_vars_for_jsr_tests()
.into_iter()
.chain(
vec![(
"DENO_DISABLE_PEDANTIC_NODE_WARNINGS".to_string(),
"1".to_string()
)]
.into_iter()
)
.collect(),
http_server: true,
});
itest!(jsr_jsonc {
args: "publish --token 'sadfasdf'",
cwd: Some("publish/jsr_jsonc"),
output: "publish/jsr_jsonc/mod.out",
envs: env_vars_for_jsr_tests(),
http_server: true,
});
itest!(unsupported_jsx_tsx {
args: "publish --token 'sadfasdf'",
cwd: Some("publish/unsupported_jsx_tsx"),
output: "publish/unsupported_jsx_tsx/mod.out",
envs: env_vars_for_jsr_npm_tests(),
http_server: true,
});
#[test]
fn provenance() {
TestContextBuilder::new()
.use_http_server()
.envs(env_vars_for_jsr_provenance_tests())
.cwd("publish/successful")
.build()
.new_command()
.args("publish")
.run()
.assert_exit_code(0)
.assert_matches_file("publish/successful_provenance.out");
}
#[test]
fn ignores_gitignore() {
@ -629,20 +387,6 @@ fn not_includes_vendor_dir_only_when_vendor_true() {
}
}
fn publish_context_builder() -> TestContextBuilder {
TestContextBuilder::new()
.use_http_server()
.envs(env_vars_for_jsr_tests())
.use_temp_cwd()
}
fn publish_context_builder_with_git_checks() -> TestContextBuilder {
TestContextBuilder::new()
.use_http_server()
.envs(env_vars_for_jsr_tests_with_git_check())
.use_temp_cwd()
}
#[test]
fn allow_dirty() {
let context = publish_context_builder_with_git_checks().build();
@ -739,3 +483,17 @@ fn allow_dirty_dry_run() {
let output = output.combined_output();
assert_contains!(output, "Aborting due to uncommitted changes. Check in source code or run with --allow-dirty");
}
fn publish_context_builder() -> TestContextBuilder {
TestContextBuilder::new()
.use_http_server()
.envs(env_vars_for_jsr_tests())
.use_temp_cwd()
}
fn publish_context_builder_with_git_checks() -> TestContextBuilder {
TestContextBuilder::new()
.use_http_server()
.envs(env_vars_for_jsr_tests_with_git_check())
.use_temp_cwd()
}

View file

@ -3439,17 +3439,6 @@ itest!(config_not_auto_discovered_for_remote_script {
http_server: true,
});
itest!(package_json_auto_discovered_for_local_script_arg {
args: "run -L debug -A no_deno_json/main.ts",
output: "run/with_package_json/no_deno_json/main.out",
// notice this is not in no_deno_json
cwd: Some("run/with_package_json/"),
// prevent creating a node_modules dir in the code directory
copy_temp_dir: Some("run/with_package_json/"),
envs: env_vars_for_npm_tests(),
http_server: true,
});
// In this case we shouldn't discover `package.json` file, because it's in a
// directory that is above the directory containing `deno.json` file.
itest!(
@ -3464,36 +3453,6 @@ itest!(
}
);
itest!(package_json_not_auto_discovered_no_config {
args: "run -L debug -A --no-config noconfig.ts",
output: "run/with_package_json/no_deno_json/noconfig.out",
cwd: Some("run/with_package_json/no_deno_json/"),
});
itest!(package_json_not_auto_discovered_no_npm {
args: "run -L debug -A --no-npm noconfig.ts",
output: "run/with_package_json/no_deno_json/noconfig.out",
cwd: Some("run/with_package_json/no_deno_json/"),
});
itest!(package_json_not_auto_discovered_env_var {
args: "run -L debug -A noconfig.ts",
output: "run/with_package_json/no_deno_json/noconfig.out",
cwd: Some("run/with_package_json/no_deno_json/"),
envs: vec![("DENO_NO_PACKAGE_JSON".to_string(), "1".to_string())],
});
itest!(
package_json_auto_discovered_node_modules_relative_package_json {
args: "run -A main.js",
output: "run/with_package_json/no_deno_json/sub_dir/main.out",
cwd: Some("run/with_package_json/no_deno_json/sub_dir"),
copy_temp_dir: Some("run/with_package_json/no_deno_json/"),
envs: env_vars_for_npm_tests(),
http_server: true,
}
);
itest!(package_json_auto_discovered_for_npm_binary {
args: "run -L debug -A npm:@denotest/bin/cli-esm this is a test",
output: "run/with_package_json/npm_binary/main.out",
@ -3503,14 +3462,6 @@ itest!(package_json_auto_discovered_for_npm_binary {
http_server: true,
});
itest!(package_json_auto_discovered_no_package_json_imports {
// this should not use --quiet because we should ensure no package.json install occurs
args: "run -A no_package_json_imports.ts",
output: "run/with_package_json/no_deno_json/no_package_json_imports.out",
cwd: Some("run/with_package_json/no_deno_json"),
copy_temp_dir: Some("run/with_package_json/no_deno_json"),
});
#[test]
fn package_json_with_deno_json() {
let context = TestContextBuilder::for_npm()

View file

@ -584,21 +584,6 @@ itest!(package_json_basic {
exit_code: 0,
});
itest!(test_lock {
args: "test",
http_server: true,
cwd: Some("lockfile/basic"),
exit_code: 10,
output: "lockfile/basic/fail.out",
});
itest!(test_no_lock {
args: "test --no-lock",
http_server: true,
cwd: Some("lockfile/basic"),
output: "lockfile/basic/test.nolock.out",
});
itest!(test_replace_timers {
args: "test test/replace_timers.js",
output: "test/replace_timers.js.out",
@ -676,12 +661,6 @@ fn conditionally_loads_type_graph() {
assert_not_contains!(output.combined_output(), "type_reference.d.ts");
}
itest!(test_include_relative_pattern_dot_slash {
args: "test",
output: "test/relative_pattern_dot_slash/output.out",
cwd: Some("test/relative_pattern_dot_slash"),
});
#[test]
fn opt_out_top_level_exclude_via_test_unexclude() {
let context = TestContextBuilder::new().use_temp_cwd().build();

View file

@ -78,6 +78,7 @@ a "steps" array.
- `output` - Path to use to assert the output.
- `cleanDenoDir` (boolean) - Whether to empty the deno_dir before running the
step.
- `flaky` (boolean) - Whether to repeat the step until it succeeds, up to 3 times.
- `if` (`"windows"`, `"linux"`, `"mac"`, `"unix"`) - Whether to run this step.
- `exitCode` (number) - Expected exit code.
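A minimal sketch of how these step options combine, using the field names from the list above; the directory, file names, and step contents here are hypothetical, assembled only to show the shape of a multi-step config:

{
  "steps": [{
    // retry this step until it succeeds, at most 3 times
    "flaky": true,
    "args": "bench --quiet main.bench.ts",
    "output": "main.bench.out"
  }, {
    // run only on Windows, with an emptied deno_dir first
    "if": "windows",
    "cleanDenoDir": true,
    "args": "bench --no-run main.bench.ts",
    "output": "no_run.out",
    "exitCode": 1
  }]
}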

View file

@ -0,0 +1,5 @@
{
"args": "bench --allow-all allow_all.ts",
"output": "allow_all.out",
"exitCode": 0
}

View file

@ -0,0 +1,23 @@
[WILDCARD]
Check [WILDLINE]/allow_all.ts
cpu: [WILDLINE]
runtime: deno [WILDLINE] ([WILDLINE])
[WILDLINE]/allow_all.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
------------------------------------------------------------------ -----------------------------
read false [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]
read true [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]
write false [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]
write true [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]
net false [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]
net true [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]
env false [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]
env true [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]
run false [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]
run true [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]
ffi false [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]
ffi true [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]
hrtime false [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]
hrtime true [WILDLINE] [WILDLINE]/iter[WILDLINE]([WILDLINE] … [WILDLINE]) [WILDLINE]

View file

@ -1,4 +1,4 @@
import { assertEquals } from "../../../tests/util/std/assert/mod.ts";
import { assertEquals } from "jsr:@std/assert";
const permissions: Deno.PermissionName[] = [
"read",

View file

@ -0,0 +1,5 @@
{
"args": "bench allow_none.ts",
"output": "allow_none.out",
"exitCode": 1
}

View file

@ -0,0 +1,22 @@
Check [WILDLINE]/allow_none.ts
cpu: [WILDLINE]
runtime: deno [WILDLINE] ([WILDLINE])
[WILDLINE]/allow_none.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
read error: PermissionDenied: Can't escalate parent thread permissions
[WILDCARD]
write error: PermissionDenied: Can't escalate parent thread permissions
[WILDCARD]
net error: PermissionDenied: Can't escalate parent thread permissions
[WILDCARD]
env error: PermissionDenied: Can't escalate parent thread permissions
[WILDCARD]
run error: PermissionDenied: Can't escalate parent thread permissions
[WILDCARD]
ffi error: PermissionDenied: Can't escalate parent thread permissions
[WILDCARD]
hrtime error: PermissionDenied: Can't escalate parent thread permissions
[WILDCARD]
error: Bench failed

View file

@ -1,5 +1,3 @@
import { unreachable } from "../../../tests/util/std/assert/mod.ts";
const permissions: Deno.PermissionName[] = [
"read",
"write",
@ -17,7 +15,7 @@ for (const name of permissions) {
[name]: true,
},
fn() {
unreachable();
throw new Error("unreachable");
},
});
}

View file

@ -0,0 +1,4 @@
{
"args": "bench --quiet before_unload_prevent_default.ts",
"output": "before_unload_prevent_default.out"
}

View file

@ -0,0 +1,7 @@
cpu: [WILDCARD]
runtime: deno [WILDCARD]
[WILDCARD]/before_unload_prevent_default.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
foo [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]

View file

@ -0,0 +1,5 @@
{
"args": "bench --quiet -A explicit_start_and_end.ts",
"output": "explicit_start_and_end.out",
"exitCode": 1
}

View file

@ -0,0 +1,25 @@
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/explicit_start_and_end.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
------------------------------------------------------------------- -----------------------------
start and end [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
start only [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
end only [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
double start error: TypeError: BenchContext::start() has already been invoked.
t.start();
^
at BenchContext.start ([WILDCARD])
at [WILDCARD]/explicit_start_and_end.ts:[WILDCARD]
double end error: TypeError: BenchContext::end() has already been invoked.
t.end();
^
at BenchContext.end ([WILDCARD])
at [WILDCARD]/explicit_start_and_end.ts:[WILDCARD]
captured error: TypeError: The benchmark which this context belongs to is not being executed.
captured!.start();
^
at BenchContext.start ([WILDCARD])
at [WILDCARD]/explicit_start_and_end.ts:[WILDCARD]
error: Bench failed

View file

@ -0,0 +1,5 @@
{
"args": "bench bench_formatting.ts",
"output": "bench_formatting.out",
"exitCode": 0
}

View file

@ -0,0 +1,8 @@
Check [WILDCARD]/bench_formatting.ts
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/bench_formatting.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
[WILDCARD] [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]

View file

@ -0,0 +1,4 @@
{
"args": "bench --quiet check_local_by_default.ts",
"output": "check_local_by_default.out"
}

View file

@ -0,0 +1,6 @@
[WILDCARD]
[WILDCARD]/check_local_by_default.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------

View file

@ -0,0 +1,5 @@
{
"args": "bench --quiet check_local_by_default2.ts",
"output": "check_local_by_default2.out",
"exitCode": 1
}

View file

@ -0,0 +1,4 @@
error: TS2322 [ERROR]: Type '12' is not assignable to type '"b"'.
const b: "b" = 12;
^
at [WILDCARD]/check_local_by_default2.ts:3:7

View file

@ -0,0 +1,5 @@
{
"args": "bench clear_timeout.ts",
"output": "clear_timeout.out",
"exitCode": 0
}

View file

@ -0,0 +1,10 @@
Check [WILDCARD]/clear_timeout.ts
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/clear_timeout.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
bench1 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
bench2 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
bench3 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]

View file

@ -0,0 +1,19 @@
{
"steps": [{
"args": "bench --ignore=collect/ignore collect",
"output": "collect.out"
}, {
"cleanDenoDir": true,
"args": "bench --config collect/deno.jsonc collect",
"output": "collect.out"
}, {
"cleanDenoDir": true,
"args": "bench --config collect/deno2.jsonc collect",
"output": "collect2.out"
}, {
"cleanDenoDir": true,
"args": "bench --config collect/deno.malformed.jsonc",
"exitCode": 1,
"output": "collect_with_malformed_config.out"
}]
}

View file

@ -1,18 +1,18 @@
Check [WILDCARD]/bench/collect/bench.ts
Check [WILDCARD]/bench/collect/include/2_bench.ts
Check [WILDCARD]/bench/collect/include/bench.ts
Check [WILDCARD]/collect/bench.ts
Check [WILDCARD]/collect/include/2_bench.ts
Check [WILDCARD]/collect/include/bench.ts
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/bench/collect/bench.ts
[WILDCARD]/collect/bench.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
[WILDCARD]/bench/collect/include/2_bench.ts
[WILDCARD]/collect/include/2_bench.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
[WILDCARD]/bench/collect/include/bench.ts
[WILDCARD]/collect/include/bench.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------

View file

@ -1,13 +1,13 @@
Check [WILDCARD]/bench/collect/bench.ts
Check [WILDCARD]/bench/collect/include/bench.ts
Check [WILDCARD]/collect/bench.ts
Check [WILDCARD]/collect/include/bench.ts
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/bench/collect/bench.ts
[WILDCARD]/collect/bench.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
[WILDCARD]/bench/collect/include/bench.ts
[WILDCARD]/collect/include/bench.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------

View file

@ -0,0 +1,5 @@
{
"args": "bench exit_sanitizer.ts",
"output": "exit_sanitizer.out",
"exitCode": 1
}

View file

@ -0,0 +1,14 @@
Check [WILDCARD]/exit_sanitizer.ts
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/exit_sanitizer.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
exit(0) error: Error: Bench attempted to exit with exit code: 0
[WILDCARD]
exit(1) error: Error: Bench attempted to exit with exit code: 1
[WILDCARD]
exit(2) error: Error: Bench attempted to exit with exit code: 2
[WILDCARD]
error: Bench failed

View file

@ -0,0 +1,5 @@
{
"args": "bench --quiet -A main.bench.ts",
"output": "main.bench.out",
"flaky": true
}

View file

@ -1,7 +1,7 @@
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/explicit_start_and_end_low_precision.ts
[WILDCARD]/main.bench.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
----------------------------------------------------------------------------- -----------------------------
noop with start and end [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]

View file

@ -0,0 +1,5 @@
{
"args": "bench fail.ts",
"output": "fail.out",
"exitCode": 1
}

View file

@ -0,0 +1,28 @@
Check [WILDCARD]/fail.ts
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/fail.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
bench0 error: Error
[WILDCARD]
bench1 error: Error
[WILDCARD]
bench2 error: Error
[WILDCARD]
bench3 error: Error
[WILDCARD]
bench4 error: Error
[WILDCARD]
bench5 error: Error
[WILDCARD]
bench6 error: Error
[WILDCARD]
bench7 error: Error
[WILDCARD]
bench8 error: Error
[WILDCARD]
bench9 error: Error
[WILDCARD]
error: Bench failed

View file

@ -0,0 +1,4 @@
{
"args": "bench --filter=foo",
"output": "filter.out"
}

View file

@ -0,0 +1,5 @@
{
"args": "bench finally_timeout.ts",
"output": "finally_timeout.out",
"exitCode": 1
}

View file

@ -0,0 +1,11 @@
Check [WILDCARD]/finally_timeout.ts
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/finally_timeout.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
error error: Error: fail
[WILDCARD]
success [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
error: Bench failed

View file

@ -0,0 +1,5 @@
{
"args": "bench group_baseline.ts",
"output": "group_baseline.out",
"exitCode": 0
}

View file

@ -0,0 +1,20 @@
[WILDCARD]/group_baseline.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
-------------------------------------------------------------------- -----------------------------
noop [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
noop2 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
summary
noo[WILDCARD]
[WILDCARD]x [WILDCARD] than noo[WILDCARD]
group url
noop3 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
parse url 2x [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
parse url 200x [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
summary
parse url 2x
[WILDCARD]x slower than noop3
[WILDCARD]x faster than parse url 200x

View file

@ -0,0 +1,5 @@
{
"args": "bench ignore.ts",
"output": "ignore.out",
"exitCode": 0
}

View file

@ -0,0 +1,8 @@
Check [WILDCARD]/ignore.ts
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/ignore.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------

View file

@ -0,0 +1,5 @@
{
"args": "bench ignore_permissions.ts",
"output": "ignore_permissions.out",
"exitCode": 0
}

View file

@ -0,0 +1,8 @@
Check [WILDCARD]/ignore_permissions.ts
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/ignore_permissions.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------

View file

@ -0,0 +1,5 @@
{
"args": "bench interval.ts",
"output": "interval.out",
"exitCode": 0
}

View file

@ -0,0 +1,8 @@
Check [WILDCARD]/interval.ts
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/interval.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------

View file

@ -0,0 +1,5 @@
{
"args": "bench --json pass.ts",
"output": "pass.json.out",
"exitCode": 0
}

View file

@ -0,0 +1,28 @@
Check file:///[WILDCARD]/pass.ts
{
"runtime": "Deno/[WILDCARD]",
"cpu": "[WILDCARD]",
"benches": [
{
"origin": "file:///[WILDCARD]/pass.ts",
"group": null,
"name": "bench0",
"baseline": false,
"results": [
{
"ok": {
"n": [WILDCARD],
"min": [WILDCARD],
"max": [WILDCARD],
"avg": [WILDCARD],
"p75": [WILDCARD],
"p99": [WILDCARD],
"p995": [WILDCARD],
"p999": [WILDCARD]
}
}
]
},
[WILDCARD]
]
}

View file

@ -0,0 +1,5 @@
{
"args": "bench load_unload.ts",
"output": "load_unload.out",
"exitCode": 0
}

View file

@ -0,0 +1,8 @@
Check [WILDCARD]/load_unload.ts
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/load_unload.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
bench [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]

View file

@ -0,0 +1,5 @@
{
"args": "bench meta.ts",
"output": "meta.out",
"exitCode": 0
}

View file

@ -0,0 +1,10 @@
Check [WILDCARD]/meta.ts
import.meta.main: false
import.meta.url: [WILDCARD]/meta.ts
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/meta.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------

View file

@ -0,0 +1,5 @@
{
"args": "bench group_baseline.ts pass.ts multiple_group.ts",
"output": "multifile_summary.out",
"exitCode": 0
}

View file

@ -0,0 +1,15 @@
Deno.bench("noop", () => {});
Deno.bench("noop2", { baseline: true }, () => {});
Deno.bench("noop3", { group: "url" }, () => {});
Deno.bench("parse url 2x", { group: "url", baseline: true }, () => {
new URL("https://deno.land/std/http/server.ts");
new URL("https://deno.land/std/http/server.ts");
});
Deno.bench("parse url 200x", { group: "url" }, () => {
for (let i = 0; i < 200; i++) {
new URL("https://deno.land/std/http/server.ts");
}
});

View file

@ -0,0 +1,64 @@
Check [WILDCARD]/group_baseline.ts
Check [WILDCARD]/pass.ts
Check [WILDCARD]/multiple_group.ts
cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/group_baseline.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
-------------------------------------------------------------------- -----------------------------
noop [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
noop2 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
summary
noo[WILDCARD]
[WILDCARD]x [WILDCARD] than noo[WILDCARD]
group url
noop3 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
parse url 2x [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
parse url 200x [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
summary
parse url 2x
[WILDLINE]x slower than noop3
[WILDLINE]x faster than parse url 200x
[WILDLINE]/pass.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
bench0 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
bench1 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
bench2 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
bench3 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
bench4 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
bench5 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
bench6 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
bench7 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
bench8 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
bench9 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
[WILDLINE]/multiple_group.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
-------------------------------------------------------------------- -----------------------------
group noop
noop [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
noop2 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
summary
noo[WILDCARD]
[WILDCARD]x [WILDCARD] than noo[WILDCARD]
group url
noop3 [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
parse url 2x [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
parse url 200x [WILDCARD] [WILDCARD]/iter[WILDCARD]([WILDCARD] … [WILDCARD]) [WILDCARD]
summary
parse url 2x
[WILDCARD]x slower than noop3
[WILDCARD]x faster than parse url 200x

View file

@ -0,0 +1,10 @@
Deno.bench("bench0", () => {});
Deno.bench("bench1", () => {});
Deno.bench("bench2", () => {});
Deno.bench("bench3", () => {});
Deno.bench("bench4", () => {});
Deno.bench("bench5", () => {});
Deno.bench("bench6", () => {});
Deno.bench("bench7", () => {});
Deno.bench("bench8", () => {});
Deno.bench("bench9", () => {});

View file

@ -0,0 +1,5 @@
{
"args": "bench --no-check no_check.ts",
"output": "no_check.out",
"exitCode": 1
}

View file

@ -0,0 +1,9 @@
error: (in promise) TypeError: Cannot read properties of undefined (reading 'fn')
Deno.bench();
^
at [WILDCARD]
at [WILDCARD]/no_check.ts:1:6
This error was not caught from a benchmark and caused the bench runner to fail on the referenced module.
It most likely originated from a dangling promise, event/timeout handler or top-level code.
error: Bench failed

View file

@ -0,0 +1,5 @@
{
"args": "bench --quiet no_prompt_by_default.ts",
"output": "no_prompt_by_default.out",
"exitCode": 1
}

View file

@ -0,0 +1,9 @@
[WILDCARD]cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/no_prompt_by_default.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
no prompt error: PermissionDenied: Requires read access to "./some_file.txt", run again with the --allow-read flag
[WILDCARD]
error: Bench failed

View file

@ -0,0 +1,5 @@
{
"args": "bench --quiet --allow-read no_prompt_with_denied_perms.ts",
"output": "no_prompt_with_denied_perms.out",
"exitCode": 1
}

View file

@ -0,0 +1,9 @@
[WILDCARD]cpu: [WILDCARD]
runtime: deno [WILDCARD] ([WILDCARD])
[WILDCARD]/no_prompt_with_denied_perms.ts
benchmark time (avg) iter/s (min … max) p75 p99 p995
--------------------------------------------------------------- -----------------------------
no prompt error: PermissionDenied: Requires read access to "./some_file.txt", run again with the --allow-read flag
[WILDCARD]
error: Bench failed

View file

@ -0,0 +1,5 @@
{
"args": "bench --no-run no_run.ts",
"output": "no_run.out",
"exitCode": 1
}

Some files were not shown because too many files have changed in this diff.