
chore(test): use file_test_runner for spec tests (#23348)

Extracted out this code to https://github.com/denoland/file_test_runner
and added parallelism. This makes these tests run 6 seconds faster on my
machine and allows reusing this code in other crates like deno_graph,
deno_doc, etc. (e.g. https://github.com/denoland/deno_graph/pull/437).
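For context, the file_test_runner 0.2.0 API that this commit switches to boils down to a collect step and a parallel run step. The sketch below is a minimal, hypothetical consumer based on the usage visible in the spec-test diff further down; the "tests/specs" path and the run_one helper are placeholders, and error handling is elided.

use std::panic::AssertUnwindSafe;
use std::path::PathBuf;
use std::sync::Arc;

fn main() {
  // Collect one test per directory that contains the manifest file.
  let root_category =
    file_test_runner::collect_tests_or_exit(file_test_runner::CollectOptions {
      base: PathBuf::from("tests/specs"), // placeholder path
      strategy: file_test_runner::FileCollectionStrategy::TestPerDirectory {
        file_name: "__test__.jsonc".to_string(),
      },
      root_category_name: "specs".to_string(),
      filter_override: None,
    });
  if root_category.is_empty() {
    return; // everything was filtered out
  }

  // Run the collected tests in parallel; the closure turns a panic from the
  // per-test body into a failed TestResult.
  file_test_runner::run_tests(
    &root_category,
    file_test_runner::RunOptions { parallel: true },
    Arc::new(|test| {
      file_test_runner::TestResult::from_maybe_panic(AssertUnwindSafe(|| {
        run_one(test)
      }))
    }),
  );
}

// Hypothetical per-test body: `test.path` is the collected manifest file.
fn run_one(test: &file_test_runner::CollectedTest) {
  let _manifest = std::fs::read_to_string(&test.path).unwrap();
  // ...parse the manifest and run its steps, panicking on assertion failures...
}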
David Sherret 2024-04-12 17:58:40 -04:00 committed by GitHub
parent c56f2e0fc0
commit 4e8d30fca1
3 changed files with 126 additions and 239 deletions

Cargo.lock generated

@@ -689,6 +689,7 @@ dependencies = [
  "deno_terminal",
  "deno_tls",
  "fastwebsockets",
+ "file_test_runner",
  "flaky_test",
  "http 1.1.0",
  "http-body-util",
@@ -2637,6 +2638,19 @@ version = "0.2.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c007b1ae3abe1cb6f85a16305acd418b7ca6343b953633fee2b76d8f108b830f"
 
+[[package]]
+name = "file_test_runner"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e13bacb20a988be248a13adc6011bce718648f78d4684d4dd0444d05256f4a7b"
+dependencies = [
+ "crossbeam-channel",
+ "deno_terminal",
+ "parking_lot 0.12.1",
+ "regex",
+ "thiserror",
+]
+
 [[package]]
 name = "filetime"
 version = "0.2.23"


@@ -43,6 +43,7 @@ deno_lockfile.workspace = true
 deno_terminal.workspace = true
 deno_tls.workspace = true
 fastwebsockets = { workspace = true, features = ["upgrade", "unstable-split"] }
+file_test_runner = "0.2.0"
 flaky_test = "=0.1.0"
 http.workspace = true
 http-body-util.workspace = true


@@ -8,144 +8,13 @@ use std::rc::Rc;
 use std::sync::Arc;
 
 use deno_core::anyhow::Context;
-use deno_core::parking_lot::Mutex;
 use deno_core::serde_json;
-use deno_terminal::colors;
 use serde::Deserialize;
 use test_util::tests_path;
 use test_util::PathRef;
 use test_util::TestContextBuilder;
 
-pub fn main() {
-  let maybe_filter = parse_cli_arg_filter();
-  let categories = filter(collect_tests(), maybe_filter.as_deref());
-  let total_tests = categories.iter().map(|c| c.tests.len()).sum::<usize>();
-  let mut failures = Vec::new();
-  let _http_guard = test_util::http_server();
-  // todo(dsherret): the output should be changed to be terse
-  // when it passes, but verbose on failure
-  for category in &categories {
-    if category.tests.is_empty() {
-      continue; // skip output when all the tests have been filtered out
-    }
-    eprintln!();
-    eprintln!(" {} {}", colors::green_bold("Running"), category.name);
-    eprintln!();
-    for test in &category.tests {
-      eprint!("test {} ... ", test.name);
-      let diagnostic_logger = Rc::new(RefCell::new(Vec::<u8>::new()));
-      let panic_message = Arc::new(Mutex::new(Vec::<u8>::new()));
-      std::panic::set_hook({
-        let panic_message = panic_message.clone();
-        Box::new(move |info| {
-          panic_message
-            .lock()
-            .extend(format!("{}", info).into_bytes());
-        })
-      });
-      let result = std::panic::catch_unwind(AssertUnwindSafe(|| {
-        run_test(test, diagnostic_logger.clone())
-      }));
-      let success = result.is_ok();
-      if !success {
-        let mut output = diagnostic_logger.borrow().clone();
-        output.push(b'\n');
-        output.extend(panic_message.lock().iter());
-        failures.push((test, output));
-      }
-      eprintln!(
-        "{}",
-        if success {
-          colors::green("ok")
-        } else {
-          colors::red("fail")
-        },
-      );
-    }
-  }
-  eprintln!();
-  if !failures.is_empty() {
-    eprintln!("spec failures:");
-    eprintln!();
-    for (failure, output) in &failures {
-      eprintln!("---- {} ----", failure.name);
-      eprintln!("{}", String::from_utf8_lossy(output));
-      eprintln!("Test file: {}", failure.manifest_file());
-      eprintln!();
-    }
-    panic!("{} failed of {}", failures.len(), total_tests);
-  } else {
-    eprintln!("{} tests passed", total_tests);
-  }
-  eprintln!();
-}
-
-fn parse_cli_arg_filter() -> Option<String> {
-  let args: Vec<String> = std::env::args().collect();
-  let maybe_filter =
-    args.get(1).filter(|s| !s.starts_with('-') && !s.is_empty());
-  maybe_filter.cloned()
-}
-
-fn run_test(test: &Test, diagnostic_logger: Rc<RefCell<Vec<u8>>>) {
-  let metadata = &test.metadata;
-  let mut builder = TestContextBuilder::new();
-  builder = builder.logging_capture(diagnostic_logger);
-  let cwd = &test.cwd;
-  if test.metadata.temp_dir {
-    builder = builder.use_temp_cwd();
-  } else {
-    builder = builder.cwd(cwd.to_string_lossy());
-  }
-  match &metadata.base {
-    // todo(dsherret): add bases in the future as needed
-    Some(base) => panic!("Unknown test base: {}", base),
-    None => {
-      // by default add npm and jsr env vars
-      builder = builder.add_jsr_env_vars().add_npm_env_vars();
-    }
-  }
-  let context = builder.build();
-  if test.metadata.temp_dir {
-    // copy all the files in the cwd to a temp directory
-    // excluding the metadata and assertion files
-    let temp_dir = context.temp_dir().path();
-    let assertion_paths = test.resolve_test_and_assertion_files();
-    cwd.copy_to_recursive_with_exclusions(temp_dir, &assertion_paths);
-  }
-  for step in &metadata.steps {
-    if step.clean_deno_dir {
-      context.deno_dir().path().remove_dir_all();
-    }
-    let command = context
-      .new_command()
-      .envs(metadata.envs.iter().chain(step.envs.iter()));
-    let command = match &step.args {
-      VecOrString::Vec(args) => command.args_vec(args),
-      VecOrString::String(text) => command.args(text),
-    };
-    let command = match &step.cwd {
-      Some(cwd) => command.current_dir(cwd),
-      None => command,
-    };
-    let output = command.run();
-    if step.output.ends_with(".out") {
-      let test_output_path = cwd.join(&step.output);
-      output.assert_matches_file(test_output_path);
-    } else {
-      output.assert_matches_text(&step.output);
-    }
-    output.assert_exit_code(step.exit_code);
-  }
-}
 
 const MANIFEST_FILE_NAME: &str = "__test__.jsonc";
 
 #[derive(Clone, Deserialize)]
 #[serde(untagged)]
@@ -207,126 +76,129 @@ struct StepMetaData {
   pub exit_code: i32,
 }
 
-#[derive(Clone)]
-struct Test {
-  pub name: String,
-  pub cwd: PathRef,
-  pub metadata: MultiTestMetaData,
-}
-
-impl Test {
-  pub fn manifest_file(&self) -> PathRef {
-    self.cwd.join("__test__.json")
-  }
-}
-
-impl Test {
-  pub fn resolve_test_and_assertion_files(&self) -> HashSet<PathRef> {
-    let mut result = HashSet::with_capacity(self.metadata.steps.len() + 1);
-    result.insert(self.manifest_file());
-    result.extend(
-      self
-        .metadata
-        .steps
-        .iter()
-        .map(|step| self.cwd.join(&step.output)),
-    );
-    result
-  }
-}
-
-struct TestCategory {
-  pub name: String,
-  pub tests: Vec<Test>,
-}
-
-fn filter(
-  categories: Vec<TestCategory>,
-  maybe_filter: Option<&str>,
-) -> Vec<TestCategory> {
-  if categories.iter().all(|c| c.tests.is_empty()) {
-    panic!("no tests found");
-  }
-  match maybe_filter {
-    Some(filter) => categories
-      .into_iter()
-      .map(|mut c| {
-        c.tests.retain(|t| t.name.contains(filter));
-        c
-      })
-      .collect(),
-    None => categories,
-  }
-}
-
-fn collect_tests() -> Vec<TestCategory> {
-  let specs_dir = tests_path().join("specs");
-  let mut result = Vec::new();
-  for entry in specs_dir.read_dir() {
-    let entry = entry.unwrap();
-    let file_type = entry
-      .file_type()
-      .context(entry.path().to_string_lossy().to_string())
-      .unwrap();
-    if !file_type.is_dir() {
-      continue;
-    }
-    let mut category = TestCategory {
-      name: format!("specs::{}", entry.file_name().to_string_lossy()),
-      tests: Vec::new(),
-    };
-    let category_path = PathRef::new(entry.path());
-    for entry in category_path.read_dir() {
-      let entry = entry.unwrap();
-      let file_type = entry
-        .file_type()
-        .context(entry.path().to_string_lossy().to_string())
-        .unwrap();
-      if !file_type.is_dir() {
-        continue;
-      }
-      let test_dir = PathRef::new(entry.path());
-      let metadata_path = test_dir.join("__test__.jsonc");
-      let metadata_value = metadata_path.read_jsonc_value();
-      // checking for "steps" leads to a more targeted error message
-      // instead of when deserializing an untagged enum
-      let metadata = if metadata_value
-        .as_object()
-        .and_then(|o| o.get("steps"))
-        .is_some()
-      {
-        serde_json::from_value::<MultiTestMetaData>(metadata_value)
-      } else {
-        serde_json::from_value::<SingleTestMetaData>(metadata_value)
-          .map(|s| s.into_multi())
-      }
-      .with_context(|| format!("Failed to parse {}", metadata_path))
-      .unwrap();
-      let test_name =
-        format!("{}::{}", category.name, entry.file_name().to_string_lossy());
-      // only support characters that work with filtering with `cargo test`
-      if !test_name
-        .chars()
-        .all(|c| c.is_alphanumeric() || matches!(c, '_' | ':'))
-      {
-        panic!(
-          "Invalid test name (only supports alphanumeric and underscore): {}",
-          test_name
-        );
-      }
-      category.tests.push(Test {
-        name: test_name,
-        cwd: test_dir,
-        metadata,
-      });
-    }
-    result.push(category);
-  }
-  result
-}
+pub fn main() {
+  let root_category =
+    file_test_runner::collect_tests_or_exit(file_test_runner::CollectOptions {
+      base: tests_path().join("specs").to_path_buf(),
+      strategy: file_test_runner::FileCollectionStrategy::TestPerDirectory {
+        file_name: MANIFEST_FILE_NAME.to_string(),
+      },
+      root_category_name: "specs".to_string(),
+      filter_override: None,
+    });
+  if root_category.is_empty() {
+    return; // all tests filtered out
+  }
+
+  let _http_guard = test_util::http_server();
+  file_test_runner::run_tests(
+    &root_category,
+    file_test_runner::RunOptions { parallel: true },
+    Arc::new(|test| {
+      let diagnostic_logger = Rc::new(RefCell::new(Vec::<u8>::new()));
+      let result = file_test_runner::TestResult::from_maybe_panic(
+        AssertUnwindSafe(|| run_test(test, diagnostic_logger.clone())),
+      );
+      match result {
+        file_test_runner::TestResult::Passed
+        | file_test_runner::TestResult::Ignored => result,
+        file_test_runner::TestResult::Failed {
+          output: panic_output,
+        } => {
+          let mut output = diagnostic_logger.borrow().clone();
+          output.push(b'\n');
+          output.extend(panic_output);
+          file_test_runner::TestResult::Failed { output }
+        }
+      }
+    }),
+  );
+}
+
+fn run_test(
+  test: &file_test_runner::CollectedTest,
+  diagnostic_logger: Rc<RefCell<Vec<u8>>>,
+) {
+  let metadata_path = PathRef::new(&test.path);
+  let metadata_value = metadata_path.read_jsonc_value();
+  // checking for "steps" leads to a more targeted error message
+  // instead of when deserializing an untagged enum
+  let metadata = if metadata_value
+    .as_object()
+    .and_then(|o| o.get("steps"))
+    .is_some()
+  {
+    serde_json::from_value::<MultiTestMetaData>(metadata_value)
+  } else {
+    serde_json::from_value::<SingleTestMetaData>(metadata_value)
+      .map(|s| s.into_multi())
+  }
+  .with_context(|| format!("Failed to parse {}", metadata_path))
+  .unwrap();
+
+  let mut builder = TestContextBuilder::new();
+  builder = builder.logging_capture(diagnostic_logger);
+  let cwd = PathRef::new(test.path.parent().unwrap());
+
+  if metadata.temp_dir {
+    builder = builder.use_temp_cwd();
+  } else {
+    builder = builder.cwd(cwd.to_string_lossy());
+  }
+
+  match &metadata.base {
+    // todo(dsherret): add bases in the future as needed
+    Some(base) => panic!("Unknown test base: {}", base),
+    None => {
+      // by default add npm and jsr env vars
+      builder = builder.add_jsr_env_vars().add_npm_env_vars();
+    }
+  }
+
+  let context = builder.build();
+
+  if metadata.temp_dir {
+    // copy all the files in the cwd to a temp directory
+    // excluding the metadata and assertion files
+    let temp_dir = context.temp_dir().path();
+    let assertion_paths = resolve_test_and_assertion_files(&cwd, &metadata);
+    cwd.copy_to_recursive_with_exclusions(temp_dir, &assertion_paths);
+  }
+  for step in &metadata.steps {
+    if step.clean_deno_dir {
+      context.deno_dir().path().remove_dir_all();
+    }
+
+    let command = context
+      .new_command()
+      .envs(metadata.envs.iter().chain(step.envs.iter()));
+    let command = match &step.args {
+      VecOrString::Vec(args) => command.args_vec(args),
+      VecOrString::String(text) => command.args(text),
+    };
+    let command = match &step.cwd {
+      Some(cwd) => command.current_dir(cwd),
+      None => command,
+    };
+    let output = command.run();
+    if step.output.ends_with(".out") {
+      let test_output_path = cwd.join(&step.output);
+      output.assert_matches_file(test_output_path);
+    } else {
+      output.assert_matches_text(&step.output);
+    }
+    output.assert_exit_code(step.exit_code);
+  }
+}
+
+fn resolve_test_and_assertion_files(
+  dir: &PathRef,
+  metadata: &MultiTestMetaData,
+) -> HashSet<PathRef> {
+  let mut result = HashSet::with_capacity(metadata.steps.len() + 1);
+  result.insert(dir.join(MANIFEST_FILE_NAME));
+  result.extend(metadata.steps.iter().map(|step| dir.join(&step.output)));
+  result
+}