// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

use crate::args::CliOptions;
use crate::args::Flags;
use crate::args::TestFlags;
use crate::args::TestReporterConfig;
use crate::colors;
use crate::display;
use crate::factory::CliFactory;
use crate::file_fetcher::File;
use crate::file_fetcher::FileFetcher;
use crate::graph_container::MainModuleGraphContainer;
use crate::graph_util::has_graph_root_local_dependent_changed;
use crate::ops;
use crate::util::file_watcher;
use crate::util::fs::collect_specifiers;
use crate::util::path::get_extension;
use crate::util::path::is_script_ext;
use crate::util::path::mapped_specifier_for_tsc;
use crate::util::path::matches_pattern_or_exact_path;
use crate::worker::CliMainWorkerFactory;
use crate::worker::CoverageCollector;

use deno_ast::swc::common::comments::CommentKind;
use deno_ast::MediaType;
use deno_ast::SourceRangedForSpanned;
use deno_config::glob::FilePatterns;
use deno_config::glob::WalkEntry;
use deno_core::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context as _;
use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::error::JsError;
use deno_core::futures::future;
use deno_core::futures::stream;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
use deno_core::located_script_name;
use deno_core::serde_v8;
use deno_core::stats::RuntimeActivity;
use deno_core::stats::RuntimeActivityDiff;
use deno_core::stats::RuntimeActivityStats;
use deno_core::stats::RuntimeActivityStatsFactory;
use deno_core::stats::RuntimeActivityStatsFilter;
use deno_core::stats::RuntimeActivityType;
use deno_core::unsync::spawn;
use deno_core::unsync::spawn_blocking;
use deno_core::url::Url;
use deno_core::v8;
use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_core::PollEventLoopOptions;
use deno_runtime::deno_io::Stdio;
use deno_runtime::deno_io::StdioPipe;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::fmt_errors::format_js_error;
use deno_runtime::tokio_util::create_and_run_current_thread;
use deno_runtime::worker::MainWorker;
use deno_runtime::WorkerExecutionMode;
use indexmap::IndexMap;
use indexmap::IndexSet;
use log::Level;
use rand::rngs::SmallRng;
use rand::seq::SliceRandom;
use rand::SeedableRng;
use regex::Regex;
use serde::Deserialize;
use std::borrow::Cow;
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::collections::HashSet;
use std::env;
use std::fmt::Write as _;
use std::future::poll_fn;
use std::io::Write;
use std::num::NonZeroUsize;
use std::path::Path;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::task::Poll;
use std::time::Duration;
use std::time::Instant;
use tokio::signal;

mod channel;
pub mod fmt;
pub mod reporters;

pub use channel::create_single_test_event_channel;
pub use channel::create_test_event_channel;
pub use channel::TestEventReceiver;
pub use channel::TestEventSender;
pub use channel::TestEventWorkerSender;
use fmt::format_sanitizer_diff;
pub use fmt::format_test_error;
use reporters::CompoundTestReporter;
use reporters::DotTestReporter;
use reporters::JunitTestReporter;
use reporters::PrettyTestReporter;
use reporters::TapTestReporter;
use reporters::TestReporter;

/// How many times we're allowed to spin the event loop before considering something a leak.
const MAX_SANITIZER_LOOP_SPINS: usize = 16;

#[derive(Default)]
struct TopLevelSanitizerStats {
  map: HashMap<(RuntimeActivityType, Cow<'static, str>), usize>,
}

fn get_sanitizer_item(
  activity: RuntimeActivity,
) -> (RuntimeActivityType, Cow<'static, str>) {
  let activity_type = activity.activity();
  match activity {
    RuntimeActivity::AsyncOp(_, _, name) => (activity_type, name.into()),
    RuntimeActivity::Resource(_, _, name) => (activity_type, name.into()),
    RuntimeActivity::Interval(_, _) => (activity_type, "".into()),
    RuntimeActivity::Timer(_, _) => (activity_type, "".into()),
  }
}

fn get_sanitizer_item_ref(
  activity: &RuntimeActivity,
) -> (RuntimeActivityType, Cow<str>) {
  let activity_type = activity.activity();
  match activity {
    RuntimeActivity::AsyncOp(_, _, name) => (activity_type, (*name).into()),
    RuntimeActivity::Resource(_, _, name) => (activity_type, name.into()),
    RuntimeActivity::Interval(_, _) => (activity_type, "".into()),
    RuntimeActivity::Timer(_, _) => (activity_type, "".into()),
  }
}

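// Illustrative sketch (not part of the upstream module): the key returned by
// `get_sanitizer_item_ref` is what `TopLevelSanitizerStats::map` counts, so a
// tally over a slice of activities could look like the helper below. The name
// `tally_activities` is hypothetical, and this assumes `RuntimeActivityType`
// implements `Hash + Eq` (which the map above already requires).
#[allow(dead_code)]
fn tally_activities<'a>(
  activities: &'a [RuntimeActivity],
) -> HashMap<(RuntimeActivityType, Cow<'a, str>), usize> {
  let mut counts = HashMap::new();
  for activity in activities {
    // Each (activity type, name) pair gets one bucket, mirroring the
    // aggregation done for top-level sanitizer reporting.
    *counts.entry(get_sanitizer_item_ref(activity)).or_insert(0) += 1;
  }
  counts
}
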
/// The test mode is used to determine how a specifier is to be tested.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TestMode {
  /// Test as documentation, type-checking fenced code blocks.
  Documentation,
  /// Test as an executable module, loading the module into the isolate and running each test it
  /// defines.
  Executable,
  /// Test as both documentation and an executable module.
  Both,
}

#[derive(Clone, Debug, Default)]
pub struct TestFilter {
  pub substring: Option<String>,
  pub regex: Option<Regex>,
  pub include: Option<Vec<String>>,
  pub exclude: Vec<String>,
}

impl TestFilter {
  pub fn includes(&self, name: &String) -> bool {
    if let Some(substring) = &self.substring {
      if !name.contains(substring) {
        return false;
      }
    }
    if let Some(regex) = &self.regex {
      if !regex.is_match(name) {
        return false;
      }
    }
    if let Some(include) = &self.include {
      if !include.contains(name) {
        return false;
      }
    }
    if self.exclude.contains(name) {
      return false;
    }
    true
  }

  pub fn from_flag(flag: &Option<String>) -> Self {
    let mut substring = None;
    let mut regex = None;
    if let Some(flag) = flag {
      if flag.starts_with('/') && flag.ends_with('/') {
        let rs = flag.trim_start_matches('/').trim_end_matches('/');
        regex =
          Some(Regex::new(rs).unwrap_or_else(|_| Regex::new("$^").unwrap()));
      } else {
        substring = Some(flag.clone());
      }
    }
    Self {
      substring,
      regex,
      ..Default::default()
    }
  }
}

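// Minimal illustrative sketch (not part of the upstream module) of how the
// `--filter` flag maps onto `TestFilter`: a value wrapped in slashes becomes a
// regex filter, anything else a plain substring filter. The test names used
// here are made up for the example.
#[cfg(test)]
mod test_filter_flag_example {
  use super::TestFilter;

  #[test]
  fn substring_and_regex_flags() {
    // `--filter foo` -> substring match
    let substring = TestFilter::from_flag(&Some("foo".to_string()));
    assert!(substring.includes(&"foo bar".to_string()));
    assert!(!substring.includes(&"baz".to_string()));

    // `--filter /^foo/` -> regex match (anchored at the start here)
    let regex = TestFilter::from_flag(&Some("/^foo/".to_string()));
    assert!(regex.includes(&"foobar".to_string()));
    assert!(!regex.includes(&"barfoo".to_string()));
  }
}
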
#[derive(Debug, Clone, PartialEq, Deserialize, Eq, Hash)]
#[serde(rename_all = "camelCase")]
pub struct TestLocation {
  pub file_name: String,
  pub line_number: u32,
  pub column_number: u32,
}

#[derive(Default)]
pub(crate) struct TestContainer(
  TestDescriptions,
  Vec<v8::Global<v8::Function>>,
);

impl TestContainer {
  pub fn register(
    &mut self,
    description: TestDescription,
    function: v8::Global<v8::Function>,
  ) {
    self.0.tests.insert(description.id, description);
    self.1.push(function)
  }

  pub fn is_empty(&self) -> bool {
    self.1.is_empty()
  }
}

#[derive(Default, Debug)]
pub struct TestDescriptions {
  tests: IndexMap<usize, TestDescription>,
}

impl TestDescriptions {
  pub fn len(&self) -> usize {
    self.tests.len()
  }

  pub fn is_empty(&self) -> bool {
    self.tests.is_empty()
  }
}

impl<'a> IntoIterator for &'a TestDescriptions {
  type Item = <&'a IndexMap<usize, TestDescription> as IntoIterator>::Item;
  type IntoIter =
    <&'a IndexMap<usize, TestDescription> as IntoIterator>::IntoIter;
  fn into_iter(self) -> Self::IntoIter {
    (&self.tests).into_iter()
  }
}

#[derive(Debug, Clone, PartialEq, Deserialize, Eq, Hash)]
#[serde(rename_all = "camelCase")]
pub struct TestDescription {
  pub id: usize,
  pub name: String,
  pub ignore: bool,
  pub only: bool,
  pub origin: String,
  pub location: TestLocation,
  pub sanitize_ops: bool,
  pub sanitize_resources: bool,
}

/// May represent a failure of a test or test step.
#[derive(Debug, Clone, PartialEq, Deserialize, Eq, Hash)]
#[serde(rename_all = "camelCase")]
pub struct TestFailureDescription {
  pub id: usize,
  pub name: String,
  pub origin: String,
  pub location: TestLocation,
}

impl From<&TestDescription> for TestFailureDescription {
  fn from(value: &TestDescription) -> Self {
    Self {
      id: value.id,
      name: value.name.clone(),
      origin: value.origin.clone(),
      location: value.location.clone(),
    }
  }
}

#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum TestFailure {
  JsError(Box<JsError>),
  FailedSteps(usize),
  IncompleteSteps,
  Leaked(Vec<String>, Vec<String>), // Details, trailer notes
  // The rest are for steps only.
  Incomplete,
  OverlapsWithSanitizers(IndexSet<String>), // Long names of overlapped tests
  HasSanitizersAndOverlaps(IndexSet<String>), // Long names of overlapped tests
}

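// Illustrative sketch (not part of the upstream module): how a few
// `TestFailure` variants render through the `Display` impl that follows.
#[cfg(test)]
mod test_failure_display_example {
  use super::TestFailure;

  #[test]
  fn failure_messages() {
    assert_eq!(
      TestFailure::FailedSteps(1).to_string(),
      "1 test step failed."
    );
    assert_eq!(
      TestFailure::FailedSteps(3).to_string(),
      "3 test steps failed."
    );
    assert_eq!(
      TestFailure::Leaked(vec!["an interval was started".to_string()], vec![])
        .to_string(),
      "Leaks detected:\n - an interval was started"
    );
  }
}
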
impl std::fmt::Display for TestFailure {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    match self {
      TestFailure::JsError(js_error) => {
        write!(f, "{}", format_test_error(js_error))
      }
      TestFailure::FailedSteps(1) => write!(f, "1 test step failed."),
      TestFailure::FailedSteps(n) => write!(f, "{n} test steps failed."),
      TestFailure::IncompleteSteps => {
        write!(f, "Completed while steps were still running. Ensure all steps are awaited with `await t.step(...)`.")
      }
      TestFailure::Incomplete => {
        write!(
          f,
          "Didn't complete before parent. Await step with `await t.step(...)`."
        )
      }
      TestFailure::Leaked(details, trailer_notes) => {
        write!(f, "Leaks detected:")?;
        for detail in details {
          write!(f, "\n - {}", detail)?;
        }
        for trailer in trailer_notes {
          write!(f, "\n{}", trailer)?;
        }
        Ok(())
      }
      TestFailure::OverlapsWithSanitizers(long_names) => {
        write!(f, "Started test step while another test step with sanitizers was running:")?;
        for long_name in long_names {
          write!(f, "\n * {}", long_name)?;
        }
        Ok(())
      }
      TestFailure::HasSanitizersAndOverlaps(long_names) => {
        write!(f, "Started test step with sanitizers while another test step was running:")?;
        for long_name in long_names {
          write!(f, "\n * {}", long_name)?;
        }
|
2024-03-25 11:08:46 -04:00
|
|
|
Ok(())
|
2023-03-25 15:32:11 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl TestFailure {
|
2024-03-25 11:08:46 -04:00
|
|
|
pub fn overview(&self) -> String {
|
|
|
|
match self {
|
|
|
|
TestFailure::JsError(js_error) => js_error.exception_message.clone(),
|
|
|
|
TestFailure::FailedSteps(1) => "1 test step failed".to_string(),
|
|
|
|
TestFailure::FailedSteps(n) => format!("{n} test steps failed"),
|
|
|
|
TestFailure::IncompleteSteps => {
|
|
|
|
"Completed while steps were still running".to_string()
|
|
|
|
}
|
|
|
|
TestFailure::Incomplete => "Didn't complete before parent".to_string(),
|
|
|
|
TestFailure::Leaked(_, _) => "Leaks detected".to_string(),
|
|
|
|
TestFailure::OverlapsWithSanitizers(_) => {
|
|
|
|
"Started test step while another test step with sanitizers was running"
|
|
|
|
.to_string()
|
|
|
|
}
|
|
|
|
TestFailure::HasSanitizersAndOverlaps(_) => {
|
|
|
|
"Started test step with sanitizers while another test step was running"
|
|
|
|
.to_string()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn detail(&self) -> String {
|
|
|
|
self.to_string()
|
|
|
|
}
|
|
|
|
|
2023-03-25 15:32:11 -04:00
|
|
|
fn format_label(&self) -> String {
|
|
|
|
match self {
|
|
|
|
TestFailure::Incomplete => colors::gray("INCOMPLETE").to_string(),
|
|
|
|
_ => colors::red("FAILED").to_string(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn format_inline_summary(&self) -> Option<String> {
|
|
|
|
match self {
|
|
|
|
TestFailure::FailedSteps(1) => Some("due to 1 failed step".to_string()),
|
|
|
|
TestFailure::FailedSteps(n) => Some(format!("due to {} failed steps", n)),
|
|
|
|
TestFailure::IncompleteSteps => {
|
|
|
|
Some("due to incomplete steps".to_string())
|
|
|
|
}
|
|
|
|
_ => None,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn hide_in_summary(&self) -> bool {
|
|
|
|
// These failure variants are hidden in summaries because they are caused
|
|
|
|
// by child errors that will be summarized separately.
|
|
|
|
matches!(
|
|
|
|
self,
|
|
|
|
TestFailure::FailedSteps(_) | TestFailure::IncompleteSteps
|
|
|
|
)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-09-19 04:25:03 -04:00
|
|
|
#[allow(clippy::derive_partial_eq_without_eq)]
|
2021-04-28 14:17:04 -04:00
|
|
|
#[derive(Debug, Clone, PartialEq, Deserialize)]
|
|
|
|
#[serde(rename_all = "camelCase")]
|
|
|
|
pub enum TestResult {
|
|
|
|
Ok,
|
|
|
|
Ignored,
|
2023-03-25 15:32:11 -04:00
|
|
|
Failed(TestFailure),
|
2022-07-15 13:09:22 -04:00
|
|
|
Cancelled,
|
2021-04-28 14:17:04 -04:00
|
|
|
}
|
|
|
|
|
2022-09-19 04:25:03 -04:00
|
|
|
#[derive(Debug, Clone, Eq, PartialEq, Deserialize)]
|
2021-10-11 09:45:02 -04:00
|
|
|
#[serde(rename_all = "camelCase")]
|
|
|
|
pub struct TestStepDescription {
|
2022-07-15 13:09:22 -04:00
|
|
|
pub id: usize,
|
2021-10-11 09:45:02 -04:00
|
|
|
pub name: String,
|
2022-07-15 13:09:22 -04:00
|
|
|
pub origin: String,
|
|
|
|
pub location: TestLocation,
|
|
|
|
pub level: usize,
|
|
|
|
pub parent_id: usize,
|
|
|
|
pub root_id: usize,
|
|
|
|
pub root_name: String,
|
|
|
|
}
|
|
|
|
|
2022-09-19 04:25:03 -04:00
|
|
|
#[allow(clippy::derive_partial_eq_without_eq)]
|
2021-10-11 09:45:02 -04:00
|
|
|
#[derive(Debug, Clone, PartialEq, Deserialize)]
|
|
|
|
#[serde(rename_all = "camelCase")]
|
|
|
|
pub enum TestStepResult {
|
|
|
|
Ok,
|
|
|
|
Ignored,
|
2023-03-25 15:32:11 -04:00
|
|
|
Failed(TestFailure),
|
2021-10-11 09:45:02 -04:00
|
|
|
}
|
|
|
|
|
2022-09-19 04:25:03 -04:00
|
|
|
#[derive(Debug, Clone, Eq, PartialEq, Deserialize)]
|
2021-07-14 15:05:16 -04:00
|
|
|
#[serde(rename_all = "camelCase")]
|
|
|
|
pub struct TestPlan {
|
|
|
|
pub origin: String,
|
|
|
|
pub total: usize,
|
|
|
|
pub filtered_out: usize,
|
|
|
|
pub used_only: bool,
|
2021-04-28 14:17:04 -04:00
|
|
|
}
|
2020-02-11 06:01:56 -05:00
|
|
|
|
2024-02-23 13:11:15 -05:00
|
|
|
#[derive(Debug, Copy, Clone, Eq, PartialEq, Deserialize)]
|
|
|
|
pub enum TestStdioStream {
|
|
|
|
Stdout,
|
|
|
|
Stderr,
|
|
|
|
}
|
|
|
|
|
2024-02-27 22:30:17 -05:00
|
|
|
#[derive(Debug)]
|
2021-07-14 15:05:16 -04:00
|
|
|
pub enum TestEvent {
|
2024-02-27 22:30:17 -05:00
|
|
|
Register(Arc<TestDescriptions>),
|
2021-07-14 15:05:16 -04:00
|
|
|
Plan(TestPlan),
|
2022-07-15 13:09:22 -04:00
|
|
|
Wait(usize),
|
2024-06-14 07:40:57 -04:00
|
|
|
Output(Vec<u8>),
|
2024-05-22 10:08:27 -04:00
|
|
|
Slow(usize, u64),
|
2022-07-15 13:09:22 -04:00
|
|
|
Result(usize, TestResult, u64),
|
2022-05-09 05:44:50 -04:00
|
|
|
UncaughtError(String, Box<JsError>),
|
2022-07-15 13:09:22 -04:00
|
|
|
StepRegister(TestStepDescription),
|
|
|
|
StepWait(usize),
|
|
|
|
StepResult(usize, TestStepResult, u64),
|
2024-03-01 16:26:57 -05:00
|
|
|
/// Indicates that this worker has completed running tests.
|
2024-02-28 17:12:21 -05:00
|
|
|
Completed,
|
|
|
|
/// Indicates that the user has cancelled the test run with Ctrl+C and
|
|
|
|
/// the run should be aborted.
|
2023-03-25 15:32:11 -04:00
|
|
|
Sigint,
|
2024-02-28 17:12:21 -05:00
|
|
|
/// Used by the REPL to force a report to end without closing the worker
|
|
|
|
/// or receiver.
|
|
|
|
ForceEndReport,
|
2021-04-30 11:56:47 -04:00
|
|
|
}
|
|
|
|
|
2024-02-23 13:11:15 -05:00
|
|
|
impl TestEvent {
|
|
|
|
// Certain messages require us to ensure that all output has been drained to ensure proper
|
|
|
|
// interleaving of output messages.
|
|
|
|
pub fn requires_stdio_sync(&self) -> bool {
|
|
|
|
matches!(
|
|
|
|
self,
|
2024-02-28 17:12:21 -05:00
|
|
|
TestEvent::Plan(..)
|
|
|
|
| TestEvent::Result(..)
|
2024-02-23 13:11:15 -05:00
|
|
|
| TestEvent::StepWait(..)
|
|
|
|
| TestEvent::StepResult(..)
|
|
|
|
| TestEvent::UncaughtError(..)
|
|
|
|
| TestEvent::ForceEndReport
|
2024-02-28 17:12:21 -05:00
|
|
|
| TestEvent::Completed
|
2024-02-23 13:11:15 -05:00
|
|
|
)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-07-13 18:11:58 -04:00
|
|
|
#[derive(Debug, Clone, Deserialize)]
|
|
|
|
pub struct TestSummary {
|
|
|
|
pub total: usize,
|
|
|
|
pub passed: usize,
|
|
|
|
pub failed: usize,
|
|
|
|
pub ignored: usize,
|
2021-11-15 10:20:37 -05:00
|
|
|
pub passed_steps: usize,
|
|
|
|
pub failed_steps: usize,
|
|
|
|
pub ignored_steps: usize,
|
2021-07-13 18:11:58 -04:00
|
|
|
pub filtered_out: usize,
|
|
|
|
pub measured: usize,
|
2024-02-05 12:27:17 -05:00
|
|
|
pub failures: Vec<(TestFailureDescription, TestFailure)>,
|
2022-05-09 05:44:50 -04:00
|
|
|
pub uncaught_errors: Vec<(String, Box<JsError>)>,
|
2021-07-13 18:11:58 -04:00
|
|
|
}
|
|
|
|
|
2022-07-15 13:09:22 -04:00
|
|
|
#[derive(Debug, Clone)]
|
2023-04-27 10:05:20 -04:00
|
|
|
struct TestSpecifiersOptions {
|
2024-03-25 11:08:46 -04:00
|
|
|
cwd: Url,
|
2021-12-30 11:18:30 -05:00
|
|
|
concurrent_jobs: NonZeroUsize,
|
|
|
|
fail_fast: Option<NonZeroUsize>,
|
2023-04-27 10:05:20 -04:00
|
|
|
log_level: Option<log::Level>,
|
2023-09-06 08:54:21 -04:00
|
|
|
filter: bool,
|
2023-04-27 10:05:20 -04:00
|
|
|
specifier: TestSpecifierOptions,
|
2023-08-02 12:38:10 -04:00
|
|
|
reporter: TestReporterConfig,
|
2023-08-02 22:05:34 -04:00
|
|
|
junit_path: Option<String>,
|
2023-04-27 10:05:20 -04:00
|
|
|
}
|
|
|
|
|
2023-10-05 06:25:15 -04:00
|
|
|
#[derive(Debug, Default, Clone)]
|
2023-04-27 10:05:20 -04:00
|
|
|
pub struct TestSpecifierOptions {
|
|
|
|
pub shuffle: Option<u64>,
|
|
|
|
pub filter: TestFilter,
|
2024-02-28 11:12:43 -05:00
|
|
|
pub trace_leaks: bool,
|
2021-12-30 11:18:30 -05:00
|
|
|
}
|
|
|
|
|
2021-07-13 18:11:58 -04:00
|
|
|
impl TestSummary {
|
2022-03-29 18:59:27 -04:00
|
|
|
pub fn new() -> TestSummary {
|
2021-07-13 18:11:58 -04:00
|
|
|
TestSummary {
|
|
|
|
total: 0,
|
|
|
|
passed: 0,
|
|
|
|
failed: 0,
|
|
|
|
ignored: 0,
|
2021-11-15 10:20:37 -05:00
|
|
|
passed_steps: 0,
|
|
|
|
failed_steps: 0,
|
|
|
|
ignored_steps: 0,
|
2021-07-13 18:11:58 -04:00
|
|
|
filtered_out: 0,
|
|
|
|
measured: 0,
|
|
|
|
failures: Vec::new(),
|
2022-05-09 05:44:50 -04:00
|
|
|
uncaught_errors: Vec::new(),
|
2021-07-13 18:11:58 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn has_failed(&self) -> bool {
|
|
|
|
self.failed > 0 || !self.failures.is_empty()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-07-10 18:12:33 -04:00
|
|
|
fn get_test_reporter(options: &TestSpecifiersOptions) -> Box<dyn TestReporter> {
|
2023-08-02 12:38:10 -04:00
|
|
|
let parallel = options.concurrent_jobs.get() > 1;
|
2023-08-02 22:05:34 -04:00
|
|
|
let reporter: Box<dyn TestReporter> = match &options.reporter {
|
2024-03-25 11:08:46 -04:00
|
|
|
TestReporterConfig::Dot => {
|
|
|
|
Box::new(DotTestReporter::new(options.cwd.clone()))
|
|
|
|
}
|
2023-08-02 12:38:10 -04:00
|
|
|
TestReporterConfig::Pretty => Box::new(PrettyTestReporter::new(
|
|
|
|
parallel,
|
|
|
|
options.log_level != Some(Level::Error),
|
2023-09-06 08:54:21 -04:00
|
|
|
options.filter,
|
2023-10-05 06:25:15 -04:00
|
|
|
false,
|
2024-03-25 11:08:46 -04:00
|
|
|
options.cwd.clone(),
|
2023-08-02 12:38:10 -04:00
|
|
|
)),
|
2023-08-02 22:05:34 -04:00
|
|
|
TestReporterConfig::Junit => {
|
2024-03-25 11:08:46 -04:00
|
|
|
Box::new(JunitTestReporter::new(options.cwd.clone(), "-".to_string()))
|
2022-04-18 09:22:23 -04:00
|
|
|
}
|
2023-08-25 19:19:23 -04:00
|
|
|
TestReporterConfig::Tap => Box::new(TapTestReporter::new(
|
2024-03-25 11:08:46 -04:00
|
|
|
options.cwd.clone(),
|
2023-08-25 19:19:23 -04:00
|
|
|
options.concurrent_jobs > NonZeroUsize::new(1).unwrap(),
|
|
|
|
)),
|
2023-08-02 22:05:34 -04:00
|
|
|
};
|
|
|
|
|
|
|
|
if let Some(junit_path) = &options.junit_path {
|
2024-03-25 11:08:46 -04:00
|
|
|
let junit = Box::new(JunitTestReporter::new(
|
|
|
|
options.cwd.clone(),
|
|
|
|
junit_path.to_string(),
|
|
|
|
));
|
2023-08-02 22:05:34 -04:00
|
|
|
return Box::new(CompoundTestReporter::new(vec![reporter, junit]));
|
2022-04-18 09:22:23 -04:00
|
|
|
}
|
2023-08-02 22:05:34 -04:00
|
|
|
|
|
|
|
reporter
|
2022-04-18 09:22:23 -04:00
|
|
|
}
|
|
|
|
|
2024-04-16 14:54:50 -04:00
|
|
|
async fn configure_main_worker(
|
|
|
|
worker_factory: Arc<CliMainWorkerFactory>,
|
|
|
|
specifier: &Url,
|
|
|
|
permissions: Permissions,
|
|
|
|
worker_sender: TestEventWorkerSender,
|
|
|
|
options: &TestSpecifierOptions,
|
|
|
|
) -> Result<(Option<Box<dyn CoverageCollector>>, MainWorker), anyhow::Error> {
|
|
|
|
let mut worker = worker_factory
|
|
|
|
.create_custom_worker(
|
2024-04-24 15:45:49 -04:00
|
|
|
WorkerExecutionMode::Test,
|
2024-04-16 14:54:50 -04:00
|
|
|
specifier.clone(),
|
|
|
|
PermissionsContainer::new(permissions),
|
|
|
|
vec![ops::testing::deno_test::init_ops(worker_sender.sender)],
|
|
|
|
Stdio {
|
|
|
|
stdin: StdioPipe::inherit(),
|
|
|
|
stdout: StdioPipe::file(worker_sender.stdout),
|
|
|
|
stderr: StdioPipe::file(worker_sender.stderr),
|
|
|
|
},
|
|
|
|
)
|
|
|
|
.await?;
|
|
|
|
let coverage_collector = worker.maybe_setup_coverage_collector().await?;
|
|
|
|
if options.trace_leaks {
|
|
|
|
worker.execute_script_static(
|
|
|
|
located_script_name!(),
|
|
|
|
"Deno[Deno.internal].core.setLeakTracingEnabled(true);",
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
let res = worker.execute_side_module_possibly_with_npm().await;
|
|
|
|
let mut worker = worker.into_main_worker();
|
|
|
|
match res {
|
|
|
|
Ok(()) => Ok(()),
|
|
|
|
Err(error) => {
|
|
|
|
// TODO(mmastrac): It would be nice to avoid having this error pattern repeated
|
|
|
|
if error.is::<JsError>() {
|
|
|
|
send_test_event(
|
|
|
|
&worker.js_runtime.op_state(),
|
|
|
|
TestEvent::UncaughtError(
|
|
|
|
specifier.to_string(),
|
|
|
|
Box::new(error.downcast::<JsError>().unwrap()),
|
|
|
|
),
|
|
|
|
)?;
|
|
|
|
Ok(())
|
|
|
|
} else {
|
|
|
|
Err(error)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}?;
|
|
|
|
Ok((coverage_collector, worker))
|
|
|
|
}
|
|
|
|
|
2021-08-26 15:21:58 -04:00
|
|
|
/// Test a single specifier as documentation containing test programs, an executable test module or
|
|
|
|
/// both.
|
2023-04-13 13:43:23 -04:00
|
|
|
pub async fn test_specifier(
|
2023-05-14 17:40:01 -04:00
|
|
|
worker_factory: Arc<CliMainWorkerFactory>,
|
2021-04-28 14:17:04 -04:00
|
|
|
permissions: Permissions,
|
2021-08-26 15:21:58 -04:00
|
|
|
specifier: ModuleSpecifier,
|
2024-04-16 14:54:50 -04:00
|
|
|
worker_sender: TestEventWorkerSender,
|
2022-12-05 16:17:49 -05:00
|
|
|
fail_fast_tracker: FailFastTracker,
|
2023-05-14 17:40:01 -04:00
|
|
|
options: TestSpecifierOptions,
|
2023-12-05 11:26:06 -05:00
|
|
|
) -> Result<(), AnyError> {
|
2024-04-16 14:54:50 -04:00
|
|
|
if fail_fast_tracker.should_stop() {
|
|
|
|
return Ok(());
|
|
|
|
}
|
|
|
|
let (coverage_collector, mut worker) = configure_main_worker(
|
2023-12-05 11:26:06 -05:00
|
|
|
worker_factory,
|
2024-04-16 14:54:50 -04:00
|
|
|
&specifier,
|
2023-12-05 11:26:06 -05:00
|
|
|
permissions,
|
2024-04-16 14:54:50 -04:00
|
|
|
worker_sender,
|
|
|
|
&options,
|
|
|
|
)
|
|
|
|
.await?;
|
|
|
|
|
|
|
|
match test_specifier_inner(
|
|
|
|
&mut worker,
|
|
|
|
coverage_collector,
|
2023-12-05 11:26:06 -05:00
|
|
|
specifier.clone(),
|
|
|
|
fail_fast_tracker,
|
|
|
|
options,
|
|
|
|
)
|
|
|
|
.await
|
|
|
|
{
|
|
|
|
Ok(()) => Ok(()),
|
|
|
|
Err(error) => {
|
2024-04-16 14:54:50 -04:00
|
|
|
// TODO(mmastrac): It would be nice to avoid having this error pattern repeated
|
2023-12-05 11:26:06 -05:00
|
|
|
if error.is::<JsError>() {
|
2024-04-16 14:54:50 -04:00
|
|
|
send_test_event(
|
|
|
|
&worker.js_runtime.op_state(),
|
|
|
|
TestEvent::UncaughtError(
|
|
|
|
specifier.to_string(),
|
|
|
|
Box::new(error.downcast::<JsError>().unwrap()),
|
|
|
|
),
|
|
|
|
)?;
|
2023-12-05 11:26:06 -05:00
|
|
|
Ok(())
|
|
|
|
} else {
|
|
|
|
Err(error)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Test a single specifier as documentation containing test programs, an executable test module or
|
|
|
|
/// both.
|
2024-02-23 13:11:15 -05:00
|
|
|
#[allow(clippy::too_many_arguments)]
|
2023-12-05 11:26:06 -05:00
|
|
|
async fn test_specifier_inner(
|
2024-04-16 14:54:50 -04:00
|
|
|
worker: &mut MainWorker,
|
|
|
|
mut coverage_collector: Option<Box<dyn CoverageCollector>>,
|
2023-12-05 11:26:06 -05:00
|
|
|
specifier: ModuleSpecifier,
|
|
|
|
fail_fast_tracker: FailFastTracker,
|
|
|
|
options: TestSpecifierOptions,
|
2021-04-28 14:17:04 -04:00
|
|
|
) -> Result<(), AnyError> {
|
2023-12-05 11:26:06 -05:00
|
|
|
// Ensure that there are no pending exceptions before we start running tests
|
|
|
|
worker.run_up_to_duration(Duration::from_millis(0)).await?;
|
|
|
|
|
2024-04-15 14:08:33 -04:00
|
|
|
worker.dispatch_load_event()?;
|
2023-04-13 13:43:23 -04:00
|
|
|
|
2024-04-16 14:54:50 -04:00
|
|
|
run_tests_for_worker(worker, &specifier, &options, &fail_fast_tracker)
|
2023-10-05 06:25:15 -04:00
|
|
|
.await?;
|
|
|
|
|
|
|
|
// Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the
|
|
|
|
// event loop to continue beyond what's needed to await results.
|
2024-04-15 14:08:33 -04:00
|
|
|
worker.dispatch_beforeunload_event()?;
|
|
|
|
worker.dispatch_unload_event()?;
|
2023-10-05 06:25:15 -04:00
|
|
|
|
2024-03-01 16:26:57 -05:00
|
|
|
// Ensure all output has been flushed
|
2024-04-16 14:54:50 -04:00
|
|
|
_ = worker
|
|
|
|
.js_runtime
|
|
|
|
.op_state()
|
|
|
|
.borrow_mut()
|
|
|
|
.borrow_mut::<TestEventSender>()
|
|
|
|
.flush();
|
2024-03-01 16:26:57 -05:00
|
|
|
|
2023-12-05 11:26:06 -05:00
|
|
|
// Ensure the worker has settled so we can catch any remaining unhandled rejections. We don't
|
|
|
|
// want to wait forever here.
|
|
|
|
worker.run_up_to_duration(Duration::from_millis(0)).await?;
|
|
|
|
|
2024-04-16 14:54:50 -04:00
|
|
|
if let Some(coverage_collector) = &mut coverage_collector {
|
2023-10-05 06:25:15 -04:00
|
|
|
worker
|
2023-11-21 21:45:34 -05:00
|
|
|
.js_runtime
|
2023-12-13 10:07:26 -05:00
|
|
|
.with_event_loop_future(
|
2023-11-21 21:45:34 -05:00
|
|
|
coverage_collector.stop_collecting().boxed_local(),
|
2023-12-13 10:07:26 -05:00
|
|
|
PollEventLoopOptions::default(),
|
2023-11-21 21:45:34 -05:00
|
|
|
)
|
2023-10-05 06:25:15 -04:00
|
|
|
.await?;
|
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-10-05 16:45:11 -04:00
|
|
|
pub fn worker_has_tests(worker: &mut MainWorker) -> bool {
|
|
|
|
let state_rc = worker.js_runtime.op_state();
|
|
|
|
let state = state_rc.borrow();
|
2024-02-27 22:30:17 -05:00
|
|
|
!state.borrow::<TestContainer>().is_empty()
|
2023-10-05 16:45:11 -04:00
|
|
|
}
|
|
|
|
|
2024-02-16 16:22:12 -05:00
|
|
|
/// Yields to tokio to allow async work to process, and then polls
|
|
|
|
/// the event loop once.
|
|
|
|
#[must_use = "The event loop result should be checked"]
|
|
|
|
pub async fn poll_event_loop(worker: &mut MainWorker) -> Result<(), AnyError> {
|
|
|
|
// Allow any ops that need to do work in the tokio event loop to do so
|
|
|
|
tokio::task::yield_now().await;
|
|
|
|
// Spin the event loop once
|
|
|
|
poll_fn(|cx| {
|
|
|
|
if let Poll::Ready(Err(err)) = worker
|
|
|
|
.js_runtime
|
|
|
|
.poll_event_loop(cx, PollEventLoopOptions::default())
|
|
|
|
{
|
|
|
|
return Poll::Ready(Err(err));
|
|
|
|
}
|
|
|
|
Poll::Ready(Ok(()))
|
|
|
|
})
|
|
|
|
.await
|
|
|
|
}
|
|
|
|
|
2024-04-16 14:54:50 -04:00
|
|
|
pub fn send_test_event(
|
|
|
|
op_state: &RefCell<OpState>,
|
|
|
|
event: TestEvent,
|
|
|
|
) -> Result<(), AnyError> {
|
|
|
|
Ok(
|
|
|
|
op_state
|
|
|
|
.borrow_mut()
|
|
|
|
.borrow_mut::<TestEventSender>()
|
|
|
|
.send(event)?,
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2023-10-05 06:25:15 -04:00
|
|
|
pub async fn run_tests_for_worker(
|
|
|
|
worker: &mut MainWorker,
|
|
|
|
specifier: &ModuleSpecifier,
|
|
|
|
options: &TestSpecifierOptions,
|
|
|
|
fail_fast_tracker: &FailFastTracker,
|
|
|
|
) -> Result<(), AnyError> {
|
2024-04-16 14:54:50 -04:00
|
|
|
let state_rc = worker.js_runtime.op_state();
|
|
|
|
// Take whatever tests have been registered
|
|
|
|
let TestContainer(tests, test_functions) =
|
|
|
|
std::mem::take(&mut *state_rc.borrow_mut().borrow_mut::<TestContainer>());
|
|
|
|
|
2024-02-27 22:30:17 -05:00
|
|
|
let tests: Arc<TestDescriptions> = tests.into();
|
2024-04-16 14:54:50 -04:00
|
|
|
send_test_event(&state_rc, TestEvent::Register(tests.clone()))?;
|
2024-02-28 17:12:21 -05:00
|
|
|
let res = run_tests_for_worker_inner(
|
|
|
|
worker,
|
|
|
|
specifier,
|
|
|
|
tests,
|
|
|
|
test_functions,
|
|
|
|
options,
|
|
|
|
fail_fast_tracker,
|
|
|
|
)
|
|
|
|
.await;
|
2024-04-16 14:54:50 -04:00
|
|
|
|
|
|
|
_ = send_test_event(&state_rc, TestEvent::Completed);
|
2024-02-28 17:12:21 -05:00
|
|
|
res
|
|
|
|
}
|
|
|
|
|
|
|
|
async fn run_tests_for_worker_inner(
|
|
|
|
worker: &mut MainWorker,
|
|
|
|
specifier: &ModuleSpecifier,
|
|
|
|
tests: Arc<TestDescriptions>,
|
|
|
|
test_functions: Vec<v8::Global<v8::Function>>,
|
|
|
|
options: &TestSpecifierOptions,
|
|
|
|
fail_fast_tracker: &FailFastTracker,
|
|
|
|
) -> Result<(), AnyError> {
|
2023-04-13 13:43:23 -04:00
|
|
|
let unfiltered = tests.len();
|
2024-04-16 14:54:50 -04:00
|
|
|
let state_rc = worker.js_runtime.op_state();
|
2024-02-28 17:12:21 -05:00
|
|
|
|
|
|
|
// Build the test plan in a single pass
|
|
|
|
let mut tests_to_run = Vec::with_capacity(tests.len());
|
|
|
|
let mut used_only = false;
|
|
|
|
for ((_, d), f) in tests.tests.iter().zip(test_functions) {
|
|
|
|
if !options.filter.includes(&d.name) {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
|
|
|
// If we've seen an "only: true" test, the remaining tests must be "only: true" to be added
|
|
|
|
if used_only && !d.only {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
|
|
|
// If this is the first "only: true" test we've seen, clear the other tests since they were
|
|
|
|
// only: false.
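// e.g. with registered tests [a, b (only), c (only)]: `a` is dropped when `b`
// is seen, so the final plan contains only [b, c].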
|
|
|
|
if d.only && !used_only {
|
|
|
|
used_only = true;
|
|
|
|
tests_to_run.clear();
|
|
|
|
}
|
|
|
|
tests_to_run.push((d, f));
|
|
|
|
}
|
|
|
|
|
2023-04-27 10:05:20 -04:00
|
|
|
if let Some(seed) = options.shuffle {
|
2024-02-28 17:12:21 -05:00
|
|
|
tests_to_run.shuffle(&mut SmallRng::seed_from_u64(seed));
|
2023-04-13 13:43:23 -04:00
|
|
|
}
|
2024-02-28 17:12:21 -05:00
|
|
|
|
2024-04-16 14:54:50 -04:00
|
|
|
send_test_event(
|
|
|
|
&state_rc,
|
|
|
|
TestEvent::Plan(TestPlan {
|
|
|
|
origin: specifier.to_string(),
|
|
|
|
total: tests_to_run.len(),
|
|
|
|
filtered_out: unfiltered - tests_to_run.len(),
|
|
|
|
used_only,
|
|
|
|
}),
|
|
|
|
)?;
|
2024-02-28 17:12:21 -05:00
|
|
|
|
2023-04-13 13:43:23 -04:00
|
|
|
let mut had_uncaught_error = false;
|
2024-02-05 14:21:29 -05:00
|
|
|
let stats = worker.js_runtime.runtime_activity_stats_factory();
|
2024-02-16 16:22:12 -05:00
|
|
|
let ops = worker.js_runtime.op_names();
|
|
|
|
|
|
|
|
// These particular ops may start and stop independently of tests, so we just filter them out
|
|
|
|
// completely.
|
|
|
|
let op_id_host_recv_message = ops
|
|
|
|
.iter()
|
|
|
|
.position(|op| *op == "op_host_recv_message")
|
|
|
|
.unwrap();
|
|
|
|
let op_id_host_recv_ctrl = ops
|
|
|
|
.iter()
|
|
|
|
.position(|op| *op == "op_host_recv_ctrl")
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
// For consistency between tests with and without sanitizers, we _always_ include
|
|
|
|
// the actual sanitizer capture before and after a test, but a test that ignores resource
|
|
|
|
// or op sanitization simply doesn't throw if one of these constraints is violated.
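// Concretely, a capture is taken right before each test runs and is diffed
// against the state after the test settles (see `wait_for_activity_to_stabilize`
// below).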
|
|
|
|
let mut filter = RuntimeActivityStatsFilter::default();
|
|
|
|
filter = filter.with_resources();
|
|
|
|
filter = filter.with_ops();
|
2024-03-01 13:15:18 -05:00
|
|
|
filter = filter.with_timers();
|
2024-02-16 16:22:12 -05:00
|
|
|
filter = filter.omit_op(op_id_host_recv_ctrl as _);
|
|
|
|
filter = filter.omit_op(op_id_host_recv_message as _);
|
2024-02-05 14:21:29 -05:00
|
|
|
|
2024-03-14 20:19:07 -04:00
|
|
|
// Count the top-level stats so we can filter them out if they complete and restart within
|
|
|
|
// a test.
|
|
|
|
let top_level_stats = stats.clone().capture(&filter);
|
|
|
|
let mut top_level = TopLevelSanitizerStats::default();
|
|
|
|
for activity in top_level_stats.dump().active {
|
|
|
|
top_level
|
|
|
|
.map
|
|
|
|
.entry(get_sanitizer_item(activity))
|
|
|
|
.and_modify(|n| *n += 1)
|
|
|
|
.or_insert(1);
|
|
|
|
}
|
|
|
|
|
2024-02-28 17:12:21 -05:00
|
|
|
for (desc, function) in tests_to_run.into_iter() {
|
2023-04-13 13:43:23 -04:00
|
|
|
if fail_fast_tracker.should_stop() {
|
|
|
|
break;
|
|
|
|
}
|
2024-02-16 16:22:12 -05:00
|
|
|
|
|
|
|
// Each test needs a fresh reqwest connection pool to avoid inter-test weirdness with connections
|
|
|
|
// failing. If we don't do this, a connection to a test server we just tore down might be re-used in
|
|
|
|
// the next test.
|
|
|
|
// TODO(mmastrac): this should be some sort of callback that we can implement for any subsystem
|
|
|
|
worker
|
|
|
|
.js_runtime
|
|
|
|
.op_state()
|
|
|
|
.borrow_mut()
|
2024-07-17 19:37:31 -04:00
|
|
|
.try_take::<deno_runtime::deno_fetch::Client>();
|
2024-02-16 16:22:12 -05:00
|
|
|
|
2023-04-13 13:43:23 -04:00
|
|
|
if desc.ignore {
|
2024-04-16 14:54:50 -04:00
|
|
|
send_test_event(
|
|
|
|
&state_rc,
|
|
|
|
TestEvent::Result(desc.id, TestResult::Ignored, 0),
|
|
|
|
)?;
|
2023-04-13 13:43:23 -04:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
if had_uncaught_error {
|
2024-04-16 14:54:50 -04:00
|
|
|
send_test_event(
|
|
|
|
&state_rc,
|
|
|
|
TestEvent::Result(desc.id, TestResult::Cancelled, 0),
|
|
|
|
)?;
|
2023-04-13 13:43:23 -04:00
|
|
|
continue;
|
|
|
|
}
|
2024-04-16 14:54:50 -04:00
|
|
|
send_test_event(&state_rc, TestEvent::Wait(desc.id))?;
|
2023-06-07 17:50:14 -04:00
|
|
|
|
2024-02-05 14:21:29 -05:00
|
|
|
// Poll the event loop once to allow all ops that have already resolved, but haven't
|
|
|
|
// yet responded, to settle.
|
2024-03-04 09:28:57 -05:00
|
|
|
// TODO(mmastrac): we should provide an API to poll the event loop until no further
|
2024-02-05 14:21:29 -05:00
|
|
|
// progress is made.
|
2024-02-16 16:22:12 -05:00
|
|
|
poll_event_loop(worker).await?;
|
2023-06-07 17:50:14 -04:00
|
|
|
|
2024-02-16 16:22:12 -05:00
|
|
|
// We always capture stats, regardless of sanitization state
|
|
|
|
let before = stats.clone().capture(&filter);
|
2024-02-05 14:21:29 -05:00
|
|
|
|
2024-03-11 21:30:15 -04:00
|
|
|
let earlier = Instant::now();
|
2023-12-13 10:07:26 -05:00
|
|
|
let call = worker.js_runtime.call(&function);
|
2024-05-22 10:08:27 -04:00
|
|
|
|
|
|
|
let slow_state_rc = state_rc.clone();
|
|
|
|
let slow_test_id = desc.id;
|
|
|
|
let slow_test_warning = spawn(async move {
|
|
|
|
// The slow test warning should pop up every DENO_SLOW_TEST_TIMEOUT*(2**n) seconds,
|
|
|
|
// with a duration that is doubling each time. So for a warning time of 60s,
|
|
|
|
// we should get a warning at 60s, 120s, 240s, etc.
|
|
|
|
let base_timeout = env::var("DENO_SLOW_TEST_TIMEOUT").unwrap_or_default();
|
|
|
|
let base_timeout = base_timeout.parse().unwrap_or(60).max(1);
|
|
|
|
let mut multiplier = 1;
|
|
|
|
let mut elapsed = 0;
|
|
|
|
loop {
|
|
|
|
tokio::time::sleep(Duration::from_secs(
|
|
|
|
base_timeout * (multiplier - elapsed),
|
|
|
|
))
|
|
|
|
.await;
|
|
|
|
if send_test_event(
|
|
|
|
&slow_state_rc,
|
|
|
|
TestEvent::Slow(
|
|
|
|
slow_test_id,
|
|
|
|
Duration::from_secs(base_timeout * multiplier).as_millis() as _,
|
|
|
|
),
|
|
|
|
)
|
|
|
|
.is_err()
|
|
|
|
{
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
multiplier *= 2;
|
|
|
|
elapsed += 1;
|
|
|
|
}
|
|
|
|
});
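    // With the default 60s base timeout the sleeps above are 60s, 60s and then
    // 120s, so the first warnings land at roughly 60s, 120s and 240s of test
    // time, matching the doubling schedule described in the comment.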

    let result = worker
      .js_runtime
      .with_event_loop_promise(call, PollEventLoopOptions::default())
      .await;
    slow_test_warning.abort();
    let result = match result {
      Ok(r) => r,
      Err(error) => {
        if error.is::<JsError>() {
          send_test_event(
            &state_rc,
            TestEvent::UncaughtError(
              specifier.to_string(),
              Box::new(error.downcast::<JsError>().unwrap()),
            ),
          )?;
          fail_fast_tracker.add_failure();
          send_test_event(
            &state_rc,
            TestEvent::Result(desc.id, TestResult::Cancelled, 0),
          )?;
          had_uncaught_error = true;
          continue;
        } else {
          return Err(error);
        }
      }
    };

    // Check the result before we check for leaks
    let result = {
      let scope = &mut worker.js_runtime.handle_scope();
      let result = v8::Local::new(scope, result);
      serde_v8::from_v8::<TestResult>(scope, result)?
    };
    if matches!(result, TestResult::Failed(_)) {
      fail_fast_tracker.add_failure();
      let elapsed = earlier.elapsed().as_millis();
      send_test_event(
        &state_rc,
        TestEvent::Result(desc.id, result, elapsed as u64),
      )?;
      continue;
    }

    // Await activity stabilization
    if let Some(diff) = wait_for_activity_to_stabilize(
      worker,
      &stats,
      &filter,
      &top_level,
      before,
      desc.sanitize_ops,
      desc.sanitize_resources,
    )
    .await?
    {
      let (formatted, trailer_notes) = format_sanitizer_diff(diff);
      if !formatted.is_empty() {
        let failure = TestFailure::Leaked(formatted, trailer_notes);
        fail_fast_tracker.add_failure();
        let elapsed = earlier.elapsed().as_millis();
        send_test_event(
          &state_rc,
          TestEvent::Result(
            desc.id,
            TestResult::Failed(failure),
            elapsed as u64,
          ),
        )?;
        continue;
      }
    }

    let elapsed = earlier.elapsed().as_millis();
    send_test_event(
      &state_rc,
      TestEvent::Result(desc.id, result, elapsed as u64),
    )?;
  }
  Ok(())
}

/// The sanitizer must ignore ops, resources and timers that were started at the top-level, but
/// completed and restarted, replacing themselves with the same "thing". For example, if you run a
/// `Deno.serve` server at the top level and make fetch requests to it during the test, those ops
/// should not count as completed during the test because they are immediately replaced.
fn is_empty(
  top_level: &TopLevelSanitizerStats,
  diff: &RuntimeActivityDiff,
) -> bool {
  // If the diff is empty, return empty
  if diff.is_empty() {
    return true;
  }
  // If the # of appeared != # of disappeared, we can exit fast with not empty
  if diff.appeared.len() != diff.disappeared.len() {
    return false;
  }
  // If there are no top-level ops and !diff.is_empty(), we can exit fast with not empty
  if top_level.map.is_empty() {
    return false;
  }
  // Otherwise we need to calculate replacement for top-level stats. Sanitizers will not fire
  // if an op, resource or timer is replaced and has a corresponding top-level op.
  let mut map = HashMap::new();
  for item in &diff.appeared {
    let item = get_sanitizer_item_ref(item);
    let Some(n1) = top_level.map.get(&item) else {
      return false;
    };
    let n2 = map.entry(item).and_modify(|n| *n += 1).or_insert(1);
    // If more ops appeared than were created at the top-level, return false
    if *n2 > *n1 {
      return false;
    }
  }
  // We know that we replaced no more things than were created at the top-level. So now we just want
  // to make sure that whatever thing was created has a corresponding disappearance record.
  for item in &diff.disappeared {
    let item = get_sanitizer_item_ref(item);
    // If more things of this type disappeared than appeared, return false
    let Some(n1) = map.get_mut(&item) else {
      return false;
    };
    *n1 -= 1;
    if *n1 == 0 {
      map.remove(&item);
    }
  }
  // If everything is accounted for, we are empty
  map.is_empty()
}
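
// Worked example for `is_empty` above (illustrative, concrete op names
// intentionally omitted): with a top-level `Deno.serve` listener, the
// top-level capture records one pending op of that kind. A request made during
// a test completes that op and the server immediately starts another one, so
// the diff shows one such op disappearing and one appearing. Both counts are
// covered by the top-level entry, so the diff is treated as empty and no leak
// is reported.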

async fn wait_for_activity_to_stabilize(
  worker: &mut MainWorker,
  stats: &RuntimeActivityStatsFactory,
  filter: &RuntimeActivityStatsFilter,
  top_level: &TopLevelSanitizerStats,
  before: RuntimeActivityStats,
  sanitize_ops: bool,
  sanitize_resources: bool,
) -> Result<Option<RuntimeActivityDiff>, AnyError> {
  // First, check to see if there's any diff at all. If not, just continue.
  let after = stats.clone().capture(filter);
  let mut diff = RuntimeActivityStats::diff(&before, &after);
  if is_empty(top_level, &diff) {
    // No activity, so we return early
    return Ok(None);
  }

  // We allow for up to MAX_SANITIZER_LOOP_SPINS to get to a point where there is no difference.
  // TODO(mmastrac): We could be much smarter about this if we had the concept of "progress" in
  // an event loop tick. Ideally we'd be able to tell if we were spinning and doing nothing, or
  // spinning and resolving ops.
  for _ in 0..MAX_SANITIZER_LOOP_SPINS {
    // There was a diff, so let the event loop run once
    poll_event_loop(worker).await?;

    let after = stats.clone().capture(filter);
    diff = RuntimeActivityStats::diff(&before, &after);
    if is_empty(top_level, &diff) {
      return Ok(None);
    }
  }

  if !sanitize_ops {
    diff
      .appeared
      .retain(|activity| !matches!(activity, RuntimeActivity::AsyncOp(..)));
    diff
      .disappeared
      .retain(|activity| !matches!(activity, RuntimeActivity::AsyncOp(..)));
  }
  if !sanitize_resources {
    diff
      .appeared
      .retain(|activity| !matches!(activity, RuntimeActivity::Resource(..)));
    diff
      .disappeared
      .retain(|activity| !matches!(activity, RuntimeActivity::Resource(..)));
  }

  // Since we don't have an option to disable timer sanitization, we use sanitize_ops == false &&
  // sanitize_resources == false to disable those.
  if !sanitize_ops && !sanitize_resources {
    diff.appeared.retain(|activity| {
      !matches!(
        activity,
        RuntimeActivity::Timer(..) | RuntimeActivity::Interval(..)
      )
    });
    diff.disappeared.retain(|activity| {
      !matches!(
        activity,
        RuntimeActivity::Timer(..) | RuntimeActivity::Interval(..)
      )
    });
  }

  Ok(if is_empty(top_level, &diff) {
    None
  } else {
    Some(diff)
  })
}
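
// Taken together, `wait_for_activity_to_stabilize` and `is_empty` mean a leak
// is only reported when, even after up to MAX_SANITIZER_LOOP_SPINS extra
// event-loop turns, the filtered before/after diff still contains activity
// that cannot be explained as a top-level replacement.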

fn extract_files_from_regex_blocks(
  specifier: &ModuleSpecifier,
  source: &str,
  media_type: MediaType,
  file_line_index: usize,
  blocks_regex: &Regex,
  lines_regex: &Regex,
) -> Result<Vec<File>, AnyError> {
  let files = blocks_regex
    .captures_iter(source)
    .filter_map(|block| {
      block.get(1)?;

      let maybe_attributes: Option<Vec<_>> = block
        .get(1)
        .map(|attributes| attributes.as_str().split(' ').collect());

      let file_media_type = if let Some(attributes) = maybe_attributes {
        if attributes.contains(&"ignore") {
          return None;
        }

        match attributes.first() {
          Some(&"js") => MediaType::JavaScript,
          Some(&"javascript") => MediaType::JavaScript,
          Some(&"mjs") => MediaType::Mjs,
          Some(&"cjs") => MediaType::Cjs,
          Some(&"jsx") => MediaType::Jsx,
          Some(&"ts") => MediaType::TypeScript,
          Some(&"typescript") => MediaType::TypeScript,
          Some(&"mts") => MediaType::Mts,
          Some(&"cts") => MediaType::Cts,
          Some(&"tsx") => MediaType::Tsx,
          _ => MediaType::Unknown,
        }
      } else {
        media_type
      };

      if file_media_type == MediaType::Unknown {
        return None;
      }

      let line_offset = source[0..block.get(0).unwrap().start()]
        .chars()
        .filter(|c| *c == '\n')
        .count();

      let line_count = block.get(0).unwrap().as_str().split('\n').count();

      let body = block.get(2).unwrap();
      let text = body.as_str();

      // TODO(caspervonb) generate an inline source map
      let mut file_source = String::new();
      for line in lines_regex.captures_iter(text) {
        let text = line.get(1).unwrap();
        writeln!(file_source, "{}", text.as_str()).unwrap();
      }

      let file_specifier = ModuleSpecifier::parse(&format!(
        "{}${}-{}",
        specifier,
        file_line_index + line_offset + 1,
        file_line_index + line_offset + line_count + 1,
      ))
      .unwrap();
      let file_specifier =
        mapped_specifier_for_tsc(&file_specifier, file_media_type)
          .map(|s| ModuleSpecifier::parse(&s).unwrap())
          .unwrap_or(file_specifier);

      Some(File {
        specifier: file_specifier,
        maybe_headers: None,
        source: file_source.into_bytes().into(),
      })
    })
    .collect();

  Ok(files)
}
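
// Extracted snippets are given synthetic specifiers of the form
// `{original specifier}${start line}-{end line}`, e.g. a fenced block near the
// top of `file:///pkg/mod.ts` might become something like
// `file:///pkg/mod.ts$11-15` (the numbers here are illustrative), optionally
// remapped by `mapped_specifier_for_tsc` so tsc sees an extension it
// understands.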

fn extract_files_from_source_comments(
  specifier: &ModuleSpecifier,
  source: Arc<str>,
  media_type: MediaType,
) -> Result<Vec<File>, AnyError> {
  let parsed_source = deno_ast::parse_module(deno_ast::ParseParams {
    specifier: specifier.clone(),
    text: source,
    media_type,
    capture_tokens: false,
    maybe_syntax: None,
    scope_analysis: false,
  })?;
  let comments = parsed_source.comments().get_vec();
  let blocks_regex = lazy_regex::regex!(r"```([^\r\n]*)\r?\n([\S\s]*?)```");
  let lines_regex = lazy_regex::regex!(r"(?:\* ?)(?:\# ?)?(.*)");

  let files = comments
    .iter()
    .filter(|comment| {
      if comment.kind != CommentKind::Block || !comment.text.starts_with('*') {
        return false;
      }

      true
    })
    .flat_map(|comment| {
      extract_files_from_regex_blocks(
        specifier,
        &comment.text,
        media_type,
        parsed_source.text_info_lazy().line_index(comment.start()),
        blocks_regex,
        lines_regex,
      )
    })
    .flatten()
    .collect();

  Ok(files)
}

fn extract_files_from_fenced_blocks(
  specifier: &ModuleSpecifier,
  source: &str,
  media_type: MediaType,
) -> Result<Vec<File>, AnyError> {
  // The pattern matches code blocks as well as anything in HTML comment syntax,
  // but it stores the latter without any capturing groups. This way, a simple
  // check can be done to see if a block is inside a comment (and skip typechecking)
  // or not by checking for the presence of capturing groups in the matches.
  let blocks_regex =
    lazy_regex::regex!(r"(?s)<!--.*?-->|```([^\r\n]*)\r?\n([\S\s]*?)```");
  let lines_regex = lazy_regex::regex!(r"(?:\# ?)?(.*)");

  extract_files_from_regex_blocks(
    specifier,
    source,
    media_type,
    /* file line index */ 0,
    blocks_regex,
    lines_regex,
  )
}

async fn fetch_inline_files(
  file_fetcher: &FileFetcher,
  specifiers: Vec<ModuleSpecifier>,
) -> Result<Vec<File>, AnyError> {
  let mut files = Vec::new();
  for specifier in specifiers {
    let fetch_permissions = PermissionsContainer::allow_all();
    let file = file_fetcher
      .fetch(&specifier, &fetch_permissions)
      .await?
      .into_text_decoded()?;

    let inline_files = if file.media_type == MediaType::Unknown {
      extract_files_from_fenced_blocks(
        &file.specifier,
        &file.source,
        file.media_type,
      )
    } else {
      extract_files_from_source_comments(
        &file.specifier,
        file.source,
        file.media_type,
      )
    };

    files.extend(inline_files?);
  }

  Ok(files)
}

/// Type check a collection of module and document specifiers.
pub async fn check_specifiers(
  file_fetcher: &FileFetcher,
  main_graph_container: &Arc<MainModuleGraphContainer>,
  specifiers: Vec<(ModuleSpecifier, TestMode)>,
) -> Result<(), AnyError> {
  let inline_files = fetch_inline_files(
    file_fetcher,
    specifiers
      .iter()
      .filter_map(|(specifier, mode)| {
        if *mode != TestMode::Executable {
          Some(specifier.clone())
        } else {
          None
        }
      })
      .collect(),
  )
  .await?;

  let mut module_specifiers = specifiers
    .into_iter()
    .filter_map(|(specifier, mode)| {
      if mode != TestMode::Documentation {
        Some(specifier)
      } else {
        None
      }
    })
    .collect::<Vec<_>>();

  if !inline_files.is_empty() {
    module_specifiers
      .extend(inline_files.iter().map(|file| file.specifier.clone()));

    for file in inline_files {
      file_fetcher.insert_memory_files(file);
    }
  }

  main_graph_container
    .check_specifiers(&module_specifiers)
    .await?;

  Ok(())
}
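
// Note: snippets extracted from documentation are registered with the file
// fetcher as in-memory files (`insert_memory_files` above), so `deno test
// --doc` type checks them together with the regular test modules.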

static HAS_TEST_RUN_SIGINT_HANDLER: AtomicBool = AtomicBool::new(false);

/// Test a collection of specifiers with test modes concurrently.
async fn test_specifiers(
  worker_factory: Arc<CliMainWorkerFactory>,
  permissions: &Permissions,
  specifiers: Vec<ModuleSpecifier>,
  options: TestSpecifiersOptions,
) -> Result<(), AnyError> {
  let specifiers = if let Some(seed) = options.specifier.shuffle {
    let mut rng = SmallRng::seed_from_u64(seed);
    let mut specifiers = specifiers;
    specifiers.sort();
    specifiers.shuffle(&mut rng);
    specifiers
  } else {
    specifiers
  };

  let (test_event_sender_factory, receiver) = create_test_event_channel();
  let concurrent_jobs = options.concurrent_jobs;

  let mut cancel_sender = test_event_sender_factory.weak_sender();
  let sigint_handler_handle = spawn(async move {
    signal::ctrl_c().await.unwrap();
    cancel_sender.send(TestEvent::Sigint).ok();
  });
  HAS_TEST_RUN_SIGINT_HANDLER.store(true, Ordering::Relaxed);
  let reporter = get_test_reporter(&options);
  let fail_fast_tracker = FailFastTracker::new(options.fail_fast);

  let join_handles = specifiers.into_iter().map(move |specifier| {
    let worker_factory = worker_factory.clone();
    let permissions = permissions.clone();
    let worker_sender = test_event_sender_factory.worker();
    let fail_fast_tracker = fail_fast_tracker.clone();
    let specifier_options = options.specifier.clone();
    spawn_blocking(move || {
      create_and_run_current_thread(test_specifier(
        worker_factory,
        permissions,
        specifier,
        worker_sender,
        fail_fast_tracker,
        specifier_options,
      ))
    })
  });

  let join_stream = stream::iter(join_handles)
    .buffer_unordered(concurrent_jobs.get())
    .collect::<Vec<Result<Result<(), AnyError>, tokio::task::JoinError>>>();

  let handler = spawn(async move { report_tests(receiver, reporter).await.0 });

  let (join_results, result) = future::join(join_stream, handler).await;
  sigint_handler_handle.abort();
  HAS_TEST_RUN_SIGINT_HANDLER.store(false, Ordering::Relaxed);
  for join_result in join_results {
    join_result??;
  }
  result??;

  Ok(())
}
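
// Note on the runner above: every test module gets its own blocking thread
// running a current-thread runtime (`spawn_blocking` +
// `create_and_run_current_thread`), with at most `concurrent_jobs` of them in
// flight, while a single reporter task (`report_tests`) consumes the merged
// event stream.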

/// Gives receiver back in case it was ended with `TestEvent::ForceEndReport`.
pub async fn report_tests(
  mut receiver: TestEventReceiver,
  mut reporter: Box<dyn TestReporter>,
) -> (Result<(), AnyError>, TestEventReceiver) {
  let mut tests = IndexMap::new();
  let mut test_steps = IndexMap::new();
  let mut tests_started = HashSet::new();
  let mut tests_with_result = HashSet::new();
  let mut start_time = None;
  let mut had_plan = false;
  let mut used_only = false;
  let mut failed = false;

  while let Some((_, event)) = receiver.recv().await {
    match event {
      TestEvent::Register(description) => {
        for (_, description) in description.into_iter() {
          reporter.report_register(description);
          // TODO(mmastrac): We shouldn't need to clone here -- we can reuse the descriptions everywhere
          tests.insert(description.id, description.clone());
        }
      }
      TestEvent::Plan(plan) => {
        if !had_plan {
          start_time = Some(Instant::now());
          had_plan = true;
        }
        if plan.used_only {
          used_only = true;
        }
        reporter.report_plan(&plan);
      }
      TestEvent::Wait(id) => {
        if tests_started.insert(id) {
          reporter.report_wait(tests.get(&id).unwrap());
        }
      }
      TestEvent::Output(output) => {
        reporter.report_output(&output);
      }
      TestEvent::Slow(id, elapsed) => {
        reporter.report_slow(tests.get(&id).unwrap(), elapsed);
      }
      TestEvent::Result(id, result, elapsed) => {
        if tests_with_result.insert(id) {
          match result {
            TestResult::Failed(_) | TestResult::Cancelled => {
              failed = true;
            }
            _ => (),
          }
          reporter.report_result(tests.get(&id).unwrap(), &result, elapsed);
        }
      }
      TestEvent::UncaughtError(origin, error) => {
        failed = true;
        reporter.report_uncaught_error(&origin, error);
      }
      TestEvent::StepRegister(description) => {
        reporter.report_step_register(&description);
        test_steps.insert(description.id, description);
      }
      TestEvent::StepWait(id) => {
        if tests_started.insert(id) {
          reporter.report_step_wait(test_steps.get(&id).unwrap());
        }
      }
      TestEvent::StepResult(id, result, duration) => {
        if tests_with_result.insert(id) {
          reporter.report_step_result(
            test_steps.get(&id).unwrap(),
            &result,
            duration,
            &tests,
            &test_steps,
          );
        }
      }
      TestEvent::ForceEndReport => {
        break;
      }
      TestEvent::Completed => {
        reporter.report_completed();
      }
      TestEvent::Sigint => {
        let elapsed = start_time
          .map(|t| Instant::now().duration_since(t))
          .unwrap_or_default();
        reporter.report_sigint(
          &tests_started
            .difference(&tests_with_result)
            .copied()
            .collect(),
          &tests,
          &test_steps,
        );

        #[allow(clippy::print_stderr)]
        if let Err(err) = reporter.flush_report(&elapsed, &tests, &test_steps) {
          eprint!("Test reporter failed to flush: {}", err)
        }
        std::process::exit(130);
      }
    }
  }

  let elapsed = start_time
    .map(|t| Instant::now().duration_since(t))
    .unwrap_or_default();
  reporter.report_summary(&elapsed, &tests, &test_steps);
  if let Err(err) = reporter.flush_report(&elapsed, &tests, &test_steps) {
    return (
      Err(generic_error(format!(
        "Test reporter failed to flush: {}",
        err
      ))),
      receiver,
    );
  }

  if used_only {
    return (
      Err(generic_error(
        "Test failed because the \"only\" option was used",
      )),
      receiver,
    );
  }

  if failed {
    return (Err(generic_error("Test failed")), receiver);
  }

  (Ok(()), receiver)
}

fn is_supported_test_path_predicate(entry: WalkEntry) -> bool {
  if !is_script_ext(entry.path) {
    false
  } else if has_supported_test_path_name(entry.path) {
    true
  } else if let Some(include) = &entry.patterns.include {
    // allow someone to explicitly specify a path
    matches_pattern_or_exact_path(include, entry.path)
  } else {
    false
  }
}

/// Checks if the path has a basename and extension Deno supports for tests.
pub(crate) fn is_supported_test_path(path: &Path) -> bool {
  has_supported_test_path_name(path) && is_script_ext(path)
}

fn has_supported_test_path_name(path: &Path) -> bool {
  if let Some(name) = path.file_stem() {
    let basename = name.to_string_lossy();
    if basename.ends_with("_test")
      || basename.ends_with(".test")
      || basename == "test"
    {
      return true;
    }

    path
      .components()
      .any(|seg| seg.as_os_str().to_str() == Some("__tests__"))
  } else {
    false
  }
}

/// Checks if the path has an extension Deno supports for tests.
fn is_supported_test_ext(path: &Path) -> bool {
  if let Some(ext) = get_extension(path) {
    matches!(
      ext.as_str(),
      "ts"
        | "tsx"
        | "js"
        | "jsx"
        | "mjs"
        | "mts"
        | "cjs"
        | "cts"
        | "md"
        | "mkd"
        | "mkdn"
        | "mdwn"
        | "mdown"
        | "markdown"
    )
  } else {
    false
  }
}
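
// For example, `foo_test.ts`, `bar.test.jsx`, `test.js` and anything under a
// `__tests__/` directory satisfy `is_supported_test_path`, while
// `is_supported_test_ext` additionally accepts markdown extensions
// (`README.md`, `docs.markdown`, ...) so those files can be scanned for
// documentation tests.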

/// Collects specifiers marking them with the appropriate test mode while maintaining the natural
/// input order.
///
/// - Specifiers matching the `is_supported_test_ext` predicate are marked as
///   `TestMode::Documentation`.
/// - Specifiers matching the `is_supported_test_path` are marked as `TestMode::Executable`.
/// - Specifiers matching both predicates are marked as `TestMode::Both`
fn collect_specifiers_with_test_mode(
  cli_options: &CliOptions,
  files: FilePatterns,
  include_inline: &bool,
) -> Result<Vec<(ModuleSpecifier, TestMode)>, AnyError> {
  // todo(dsherret): there's no need to collect twice as it's slow
  let vendor_folder = cli_options.vendor_dir_path();
  let module_specifiers = collect_specifiers(
    files.clone(),
    vendor_folder.map(ToOwned::to_owned),
    is_supported_test_path_predicate,
  )?;

  if *include_inline {
    return collect_specifiers(
      files,
      vendor_folder.map(ToOwned::to_owned),
      |e| is_supported_test_ext(e.path),
    )
    .map(|specifiers| {
      specifiers
        .into_iter()
        .map(|specifier| {
          let mode = if module_specifiers.contains(&specifier) {
            TestMode::Both
          } else {
            TestMode::Documentation
          };

          (specifier, mode)
        })
        .collect()
    });
  }

  let specifiers_with_mode = module_specifiers
    .into_iter()
    .map(|specifier| (specifier, TestMode::Executable))
    .collect();

  Ok(specifiers_with_mode)
}
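
// With `--doc` (`include_inline`), a markdown file such as `README.md` comes
// back as `TestMode::Documentation` and a module like `foo_test.ts` as
// `TestMode::Both`; without it only executable test modules are returned.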

/// Collects module and document specifiers with test modes via
/// `collect_specifiers_with_test_mode` which are then pre-fetched and adjusted
/// based on the media type.
///
/// Specifiers that do not have a known media type that can be executed as a
/// module are marked as `TestMode::Documentation`. Type definition files
/// cannot be run, and therefore need to be marked as `TestMode::Documentation`
/// as well.
async fn fetch_specifiers_with_test_mode(
  cli_options: &CliOptions,
  file_fetcher: &FileFetcher,
  member_patterns: impl Iterator<Item = FilePatterns>,
  doc: &bool,
) -> Result<Vec<(ModuleSpecifier, TestMode)>, AnyError> {
  let mut specifiers_with_mode = member_patterns
    .map(|files| {
      collect_specifiers_with_test_mode(cli_options, files.clone(), doc)
    })
    .collect::<Result<Vec<_>, _>>()?
    .into_iter()
    .flatten()
    .collect::<Vec<_>>();

  for (specifier, mode) in &mut specifiers_with_mode {
    let file = file_fetcher
      .fetch(specifier, &PermissionsContainer::allow_all())
      .await?;

    let (media_type, _) = file.resolve_media_type_and_charset();
    if matches!(media_type, MediaType::Unknown | MediaType::Dts) {
      *mode = TestMode::Documentation
    }
  }

  Ok(specifiers_with_mode)
}

pub async fn run_tests(
  flags: Arc<Flags>,
  test_flags: TestFlags,
) -> Result<(), AnyError> {
  let factory = CliFactory::from_flags(flags);
  let cli_options = factory.cli_options()?;
  let workspace_test_options =
    cli_options.resolve_workspace_test_options(&test_flags);
  let file_fetcher = factory.file_fetcher()?;
  // Various test files should not share the same permissions in terms of
  // `PermissionsContainer` - otherwise granting/revoking permissions in one
  // file would have impact on other files, which is undesirable.
  let permissions =
    Permissions::from_options(&cli_options.permissions_options()?)?;
  let log_level = cli_options.log_level();

  let members_with_test_options =
    cli_options.resolve_test_options_for_members(&test_flags)?;
  let specifiers_with_mode = fetch_specifiers_with_test_mode(
    cli_options,
    file_fetcher,
    members_with_test_options.into_iter().map(|(_, v)| v.files),
    &workspace_test_options.doc,
  )
  .await?;

  if !workspace_test_options.allow_none && specifiers_with_mode.is_empty() {
    return Err(generic_error("No test modules found"));
  }

  let main_graph_container = factory.main_module_graph_container().await?;

  check_specifiers(
    file_fetcher,
    main_graph_container,
    specifiers_with_mode.clone(),
  )
  .await?;

  if workspace_test_options.no_run {
    return Ok(());
  }

  let worker_factory =
    Arc::new(factory.create_cli_main_worker_factory().await?);

  test_specifiers(
    worker_factory,
    &permissions,
    specifiers_with_mode
      .into_iter()
      .filter_map(|(s, m)| match m {
        TestMode::Documentation => None,
        _ => Some(s),
      })
      .collect(),
    TestSpecifiersOptions {
      cwd: Url::from_directory_path(cli_options.initial_cwd()).map_err(
        |_| {
          generic_error(format!(
            "Unable to construct URL from the path of cwd: {}",
            cli_options.initial_cwd().to_string_lossy(),
          ))
        },
      )?,
      concurrent_jobs: workspace_test_options.concurrent_jobs,
      fail_fast: workspace_test_options.fail_fast,
      log_level,
      filter: workspace_test_options.filter.is_some(),
      reporter: workspace_test_options.reporter,
      junit_path: workspace_test_options.junit_path,
      specifier: TestSpecifierOptions {
        filter: TestFilter::from_flag(&workspace_test_options.filter),
        shuffle: workspace_test_options.shuffle,
        trace_leaks: workspace_test_options.trace_leaks,
      },
    },
  )
  .await?;

  Ok(())
}

pub async fn run_tests_with_watch(
  flags: Arc<Flags>,
  test_flags: TestFlags,
) -> Result<(), AnyError> {
  // On top of the sigint handlers which are added and unbound for each test
  // run, a process-scoped basic exit handler is required due to a tokio
  // limitation where it doesn't unbind its own handler for the entire process
  // once a user adds one.
  spawn(async move {
    loop {
      signal::ctrl_c().await.unwrap();
      if !HAS_TEST_RUN_SIGINT_HANDLER.load(Ordering::Relaxed) {
        std::process::exit(130);
      }
    }
  });

  file_watcher::watch_func(
    flags,
    file_watcher::PrintConfig::new(
      "Test",
      test_flags
        .watch
        .as_ref()
        .map(|w| !w.no_clear_screen)
        .unwrap_or(true),
    ),
    move |flags, watcher_communicator, changed_paths| {
      let test_flags = test_flags.clone();
      Ok(async move {
        let factory = CliFactory::from_flags_for_watcher(
          flags,
          watcher_communicator.clone(),
        );
        let cli_options = factory.cli_options()?;
        let workspace_test_options =
          cli_options.resolve_workspace_test_options(&test_flags);

        let _ = watcher_communicator.watch_paths(cli_options.watch_paths());
        let graph_kind = cli_options.type_check_mode().as_graph_kind();
        let log_level = cli_options.log_level();
        let cli_options = cli_options.clone();
        let module_graph_creator = factory.module_graph_creator().await?;
        let file_fetcher = factory.file_fetcher()?;
        let members_with_test_options =
          cli_options.resolve_test_options_for_members(&test_flags)?;
        let watch_paths = members_with_test_options
          .iter()
          .filter_map(|(_, test_options)| {
            test_options
              .files
              .include
              .as_ref()
              .map(|set| set.base_paths())
          })
          .flatten()
          .collect::<Vec<_>>();
        let _ = watcher_communicator.watch_paths(watch_paths);
        let test_modules = members_with_test_options
          .iter()
          .map(|(_, test_options)| {
            collect_specifiers(
              test_options.files.clone(),
              cli_options.vendor_dir_path().map(ToOwned::to_owned),
              if workspace_test_options.doc {
                Box::new(|e: WalkEntry| is_supported_test_ext(e.path))
                  as Box<dyn Fn(WalkEntry) -> bool>
              } else {
                Box::new(is_supported_test_path_predicate)
              },
            )
          })
          .collect::<Result<Vec<_>, _>>()?
          .into_iter()
          .flatten()
          .collect::<Vec<_>>();

        let permissions =
          Permissions::from_options(&cli_options.permissions_options()?)?;
        let graph = module_graph_creator
          .create_graph(graph_kind, test_modules)
          .await?;
        module_graph_creator.graph_valid(&graph)?;
        let test_modules = &graph.roots;

        let test_modules_to_reload = if let Some(changed_paths) = changed_paths
        {
          let mut result = IndexSet::with_capacity(test_modules.len());
          let changed_paths = changed_paths.into_iter().collect::<HashSet<_>>();
          for test_module_specifier in test_modules {
            if has_graph_root_local_dependent_changed(
              &graph,
              test_module_specifier,
              &changed_paths,
            ) {
              result.insert(test_module_specifier.clone());
            }
          }
          result
        } else {
          test_modules.clone()
        };

        let worker_factory =
          Arc::new(factory.create_cli_main_worker_factory().await?);
        let specifiers_with_mode = fetch_specifiers_with_test_mode(
          &cli_options,
          file_fetcher,
          members_with_test_options.into_iter().map(|(_, v)| v.files),
          &workspace_test_options.doc,
        )
        .await?
        .into_iter()
        .filter(|(specifier, _)| test_modules_to_reload.contains(specifier))
        .collect::<Vec<(ModuleSpecifier, TestMode)>>();

        let main_graph_container =
          factory.main_module_graph_container().await?;
        check_specifiers(
          file_fetcher,
          main_graph_container,
          specifiers_with_mode.clone(),
        )
        .await?;

        if workspace_test_options.no_run {
          return Ok(());
        }

        test_specifiers(
          worker_factory,
          &permissions,
          specifiers_with_mode
            .into_iter()
            .filter_map(|(s, m)| match m {
              TestMode::Documentation => None,
              _ => Some(s),
            })
            .collect(),
          TestSpecifiersOptions {
            cwd: Url::from_directory_path(cli_options.initial_cwd()).map_err(
              |_| {
                generic_error(format!(
                  "Unable to construct URL from the path of cwd: {}",
                  cli_options.initial_cwd().to_string_lossy(),
                ))
              },
            )?,
            concurrent_jobs: workspace_test_options.concurrent_jobs,
            fail_fast: workspace_test_options.fail_fast,
            log_level,
            filter: workspace_test_options.filter.is_some(),
            reporter: workspace_test_options.reporter,
            junit_path: workspace_test_options.junit_path,
            specifier: TestSpecifierOptions {
              filter: TestFilter::from_flag(&workspace_test_options.filter),
              shuffle: workspace_test_options.shuffle,
              trace_leaks: workspace_test_options.trace_leaks,
            },
          },
        )
        .await?;

        Ok(())
      })
    },
  )
  .await?;

  Ok(())
}

/// Tracks failures for the `--fail-fast` argument in
/// order to tell when to stop running tests.
#[derive(Clone, Default)]
pub struct FailFastTracker {
  max_count: Option<usize>,
  failure_count: Arc<AtomicUsize>,
}

impl FailFastTracker {
  pub fn new(fail_fast: Option<NonZeroUsize>) -> Self {
    Self {
      max_count: fail_fast.map(|v| v.into()),
      failure_count: Default::default(),
    }
  }

  pub fn add_failure(&self) -> bool {
    if let Some(max_count) = &self.max_count {
      self
        .failure_count
        .fetch_add(1, std::sync::atomic::Ordering::SeqCst)
        >= *max_count
    } else {
      false
    }
  }

  pub fn should_stop(&self) -> bool {
    if let Some(max_count) = &self.max_count {
      self.failure_count.load(std::sync::atomic::Ordering::SeqCst) >= *max_count
    } else {
      false
    }
  }
}

#[cfg(test)]
mod inner_test {
  use std::path::Path;

  use super::*;

  #[test]
  fn test_is_supported_test_ext() {
    assert!(!is_supported_test_ext(Path::new("tests/subdir/redirects")));
    assert!(is_supported_test_ext(Path::new("README.md")));
    assert!(is_supported_test_ext(Path::new("readme.MD")));
    assert!(is_supported_test_ext(Path::new("lib/typescript.d.ts")));
    assert!(is_supported_test_ext(Path::new(
      "testdata/run/001_hello.js"
    )));
    assert!(is_supported_test_ext(Path::new(
      "testdata/run/002_hello.ts"
    )));
    assert!(is_supported_test_ext(Path::new("foo.jsx")));
    assert!(is_supported_test_ext(Path::new("foo.tsx")));
    assert!(is_supported_test_ext(Path::new("foo.TS")));
    assert!(is_supported_test_ext(Path::new("foo.TSX")));
    assert!(is_supported_test_ext(Path::new("foo.JS")));
    assert!(is_supported_test_ext(Path::new("foo.JSX")));
    assert!(is_supported_test_ext(Path::new("foo.mjs")));
    assert!(is_supported_test_ext(Path::new("foo.mts")));
    assert!(is_supported_test_ext(Path::new("foo.cjs")));
    assert!(is_supported_test_ext(Path::new("foo.cts")));
    assert!(!is_supported_test_ext(Path::new("foo.mjsx")));
    assert!(!is_supported_test_ext(Path::new("foo.jsonc")));
    assert!(!is_supported_test_ext(Path::new("foo.JSONC")));
    assert!(!is_supported_test_ext(Path::new("foo.json")));
    assert!(!is_supported_test_ext(Path::new("foo.JsON")));
  }

  #[test]
  fn test_is_supported_test_path() {
    assert!(is_supported_test_path(Path::new(
      "tests/subdir/foo_test.ts"
    )));
    assert!(is_supported_test_path(Path::new(
      "tests/subdir/foo_test.tsx"
    )));
    assert!(is_supported_test_path(Path::new(
      "tests/subdir/foo_test.js"
    )));
    assert!(is_supported_test_path(Path::new(
      "tests/subdir/foo_test.jsx"
    )));
    assert!(is_supported_test_path(Path::new("bar/foo.test.ts")));
    assert!(is_supported_test_path(Path::new("bar/foo.test.tsx")));
    assert!(is_supported_test_path(Path::new("bar/foo.test.js")));
    assert!(is_supported_test_path(Path::new("bar/foo.test.jsx")));
    assert!(is_supported_test_path(Path::new("foo/bar/test.js")));
    assert!(is_supported_test_path(Path::new("foo/bar/test.jsx")));
    assert!(is_supported_test_path(Path::new("foo/bar/test.ts")));
    assert!(is_supported_test_path(Path::new("foo/bar/test.tsx")));
    assert!(is_supported_test_path(Path::new(
      "foo/bar/__tests__/foo.js"
    )));
    assert!(is_supported_test_path(Path::new(
      "foo/bar/__tests__/foo.jsx"
    )));
    assert!(is_supported_test_path(Path::new(
      "foo/bar/__tests__/foo.ts"
    )));
    assert!(is_supported_test_path(Path::new(
      "foo/bar/__tests__/foo.tsx"
    )));
    assert!(!is_supported_test_path(Path::new("README.md")));
    assert!(!is_supported_test_path(Path::new("lib/typescript.d.ts")));
    assert!(!is_supported_test_path(Path::new("notatest.js")));
    assert!(!is_supported_test_path(Path::new("NotAtest.ts")));
  }
}
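
// A minimal sketch of unit tests for `FailFastTracker` (the module and test
// names below are illustrative additions, not part of the upstream test
// suite). They exercise the threshold behavior shown above: `add_failure`
// reports whether the budget was already exhausted *before* this failure, and
// `should_stop` reports whether it is exhausted now.
#[cfg(test)]
mod fail_fast_tracker_sketch_test {
  use std::num::NonZeroUsize;

  use super::*;

  #[test]
  fn fail_fast_tracker_thresholds() {
    // A `--fail-fast=2` style budget.
    let tracker = FailFastTracker::new(NonZeroUsize::new(2));
    assert!(!tracker.should_stop());
    // First failure: count goes 0 -> 1, budget not yet exhausted.
    assert!(!tracker.add_failure());
    assert!(!tracker.should_stop());
    // Second failure: count goes 1 -> 2, so the runner should now stop.
    assert!(!tracker.add_failure());
    assert!(tracker.should_stop());
    // Any further failure observes an already-exhausted budget.
    assert!(tracker.add_failure());

    // Without a limit the tracker never asks the runner to stop.
    let unlimited = FailFastTracker::new(None);
    assert!(!unlimited.add_failure());
    assert!(!unlimited.should_stop());
  }
}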