This reverts commit d5b38a9929.

parent: b686907a45
commit: 4e3068be63

8 changed files with 364 additions and 489 deletions
@@ -19,7 +19,6 @@
"cli/dts/lib.scripthost.d.ts",
"cli/dts/lib.webworker*.d.ts",
"cli/dts/typescript.d.ts",
"cli/tests/testdata/coverage/complex.ts",
"cli/tests/testdata/encoding",
"cli/tests/testdata/inline_js_source_map*",
"cli/tests/testdata/badly_formatted.md",
@@ -1072,7 +1072,7 @@ async fn coverage_command(
return Err(generic_error("No matching coverage profiles found"));
}

tools::coverage::cover_scripts(
tools::coverage::cover_files(
flags.clone(),
coverage_flags.files,
coverage_flags.ignore,
cli/tests/testdata/coverage/complex.ts (vendored, 3 changed lines)
@@ -69,6 +69,3 @@ export function ƒ(): number {

// This arrow function should also show up as uncovered.
console.log("%s", () => 1);

// End with a newline:
cli/tests/testdata/coverage/expected_branch.lcov (vendored, 13 changed lines)
@@ -2,18 +2,19 @@ SF:[WILDCARD]branch.ts
FN:2,branch
FN:10,unused
FNDA:1,branch
FNDA:0,unused
FNF:2
FNH:1
BRDA:5,1,0,0
BRDA:4,1,0,0
BRF:1
BRH:0
DA:1,4
DA:2,4
DA:3,4
DA:1,1
DA:2,2
DA:3,2
DA:4,0
DA:5,0
DA:6,0
DA:7,2
DA:7,1
DA:9,0
DA:10,0
DA:11,0

@@ -21,6 +22,6 @@ DA:12,0
DA:13,0
DA:14,0
DA:15,0
LF:14
LH:4
LF:14
end_of_record
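For reference, the LCOV tracefile fields exercised by these fixtures are: FN:<line>,<name> and FNDA:<hits>,<name> for function locations and hit counts, FNF/FNH for functions found/hit, BRDA and BRF/BRH for branch data, DA:<line>,<hits> for per-line execution counts, and LF/LH for lines found/hit. Below is a minimal, self-contained sketch of emitting one such record from a list of (line, hits) pairs; the helper and its name are illustrative and not part of this commit.

// Illustrative sketch: emit a minimal LCOV record for one source file.
// `lines` is a list of (1-based line number, execution count) pairs.
fn write_lcov_record(path: &str, lines: &[(usize, usize)]) -> String {
  let mut out = String::new();
  out.push_str(&format!("SF:{}\n", path));
  for (line, hits) in lines {
    out.push_str(&format!("DA:{},{}\n", line, hits));
  }
  let lines_found = lines.len();
  let lines_hit = lines.iter().filter(|(_, hits)| *hits != 0).count();
  out.push_str(&format!("LH:{}\n", lines_hit));
  out.push_str(&format!("LF:{}\n", lines_found));
  out.push_str("end_of_record\n");
  out
}

fn main() {
  // Mirrors the shape of the expected_branch.lcov fixture above.
  print!("{}", write_lcov_record("branch.ts", &[(1, 1), (2, 2), (3, 2), (4, 0)]));
}
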
@@ -1,10 +1,12 @@
SF:[WILDCARD]complex.ts
FN:18,dependency
FN:33,complex
FN:47,unused
FN:22,dependency
FN:37,complex
FN:51,unused
FN:65,ƒ
FNDA:1,dependency
FNDA:1,complex
FNDA:0,unused
FNDA:0,ƒ
FNF:4
FNH:2
BRF:0

@@ -12,30 +14,27 @@ BRH:0
DA:17,2
DA:18,2
DA:19,2
DA:20,4
DA:21,2
DA:22,4
DA:23,4
DA:24,4
DA:25,4
DA:20,2
DA:22,2
DA:23,2
DA:24,2
DA:25,2
DA:26,2
DA:27,2
DA:32,2
DA:33,2
DA:34,2
DA:35,4
DA:36,2
DA:27,1
DA:32,1
DA:33,1
DA:34,1
DA:35,1
DA:37,2
DA:38,2
DA:39,2
DA:40,2
DA:41,2
DA:42,2
DA:42,1
DA:46,0
DA:47,0
DA:48,0
DA:49,0
DA:50,0
DA:51,0
DA:52,0
DA:53,0

@@ -49,6 +48,6 @@ DA:66,0
DA:67,0
DA:68,1
DA:71,0
LF:40
LH:24
LH:22
LF:37
end_of_record
@@ -1,9 +1,9 @@
cover [WILDCARD]complex.ts ... 60.000% (24/40)
cover [WILDCARD]/coverage/complex.ts ... 59.459% (22/37)
46 | export function unused(
47 | foo: string,
48 | bar: string,
49 | baz: string,
50 | ): Complex {
-----|-----
51 | return complex(
52 | foo,
53 | bar,
@@ -1,2 +1,2 @@
[WILDCARD]
error: Found 7 not formatted files in [WILDCARD] files
error: Found 6 not formatted files in [WILDCARD] files
@@ -6,29 +6,30 @@ use crate::fs_util::collect_files;
use crate::module_graph::TypeLib;
use crate::proc_state::ProcState;
use crate::source_maps::SourceMapGetter;
use deno_ast::swc::common::Span;
use deno_ast::MediaType;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_core::LocalInspectorSession;
use deno_core::ModuleSpecifier;
use deno_runtime::permissions::Permissions;
use regex::Regex;
use serde::Deserialize;
use serde::Serialize;
use sourcemap::SourceMap;
use std::cmp;
use std::fs;
use std::fs::File;
use std::io::BufWriter;
use std::io::Write;
use std::path::PathBuf;
use std::sync::Arc;
use uuid::Uuid;

// TODO(caspervonb) These structs are specific to the inspector protocol and should be refactored
// into a reusable module.
// TODO(caspervonb) all of these structs can and should be made private, possibly moved to
// inspector::protocol.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
struct CoverageRange {
pub struct CoverageRange {
pub start_offset: usize,
pub end_offset: usize,
pub count: usize,
@@ -36,36 +37,23 @@ struct CoverageRange {

#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
struct FunctionCoverage {
pub struct FunctionCoverage {
pub function_name: String,
pub ranges: Vec<CoverageRange>,
pub is_block_coverage: bool,
}

#[derive(Debug, Serialize, Deserialize, Clone)]
struct LineCoverage {
pub start_offset: usize,
pub end_offset: usize,
pub ranges: Vec<CoverageRange>,
}

#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
struct ScriptCoverage {
pub struct ScriptCoverage {
pub script_id: String,
pub url: String,
pub functions: Vec<FunctionCoverage>,
}

#[derive(Debug, Serialize, Deserialize, Clone)]
struct CoverageResult {
pub lines: Vec<LineCoverage>,
pub functions: Vec<FunctionCoverage>,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct StartPreciseCoverageParameters {
pub struct StartPreciseCoverageParameters {
pub call_count: bool,
pub detailed: bool,
pub allow_triggered_updates: bool,
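The structs restored to pub above mirror the Chrome DevTools Protocol Profiler domain types (ScriptCoverage, FunctionCoverage, CoverageRange) that the inspector returns from Profiler.takePreciseCoverage. As a standalone illustration, a sketch of deserializing that wire shape with serde; it assumes serde and serde_json as dependencies and is independent of the Deno code in this diff.

use serde::Deserialize;

// Shapes follow the CDP Profiler domain; field names are camelCase on the wire.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct CoverageRange {
  start_offset: usize,
  end_offset: usize,
  count: usize,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct FunctionCoverage {
  function_name: String,
  ranges: Vec<CoverageRange>,
  is_block_coverage: bool,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ScriptCoverage {
  script_id: String,
  url: String,
  functions: Vec<FunctionCoverage>,
}

fn main() -> Result<(), serde_json::Error> {
  let raw = r#"{
    "scriptId": "1",
    "url": "file:///complex.ts",
    "functions": [{
      "functionName": "dependency",
      "isBlockCoverage": true,
      "ranges": [{ "startOffset": 0, "endOffset": 100, "count": 1 }]
    }]
  }"#;
  let coverage: ScriptCoverage = serde_json::from_str(raw)?;
  println!(
    "script {}: {} functions in {}",
    coverage.script_id,
    coverage.functions.len(),
    coverage.url
  );
  Ok(())
}
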
@@ -73,13 +61,13 @@ struct StartPreciseCoverageParameters {

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct StartPreciseCoverageReturnObject {
pub struct StartPreciseCoverageReturnObject {
pub timestamp: f64,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct TakePreciseCoverageReturnObject {
pub struct TakePreciseCoverageReturnObject {
pub result: Vec<ScriptCoverage>,
pub timestamp: f64,
}
@@ -181,7 +169,7 @@ impl CoverageCollector {
}
}

enum CoverageReporterKind {
pub enum CoverageReporterKind {
Pretty,
Lcov,
}
@@ -190,18 +178,21 @@ fn create_reporter(
kind: CoverageReporterKind,
) -> Box<dyn CoverageReporter + Send> {
match kind {
CoverageReporterKind::Pretty => Box::new(PrettyCoverageReporter::new()),
CoverageReporterKind::Lcov => Box::new(LcovCoverageReporter::new()),
CoverageReporterKind::Pretty => Box::new(PrettyCoverageReporter::new()),
}
}

trait CoverageReporter {
fn report_result(
pub trait CoverageReporter {
fn visit_coverage(
&mut self,
specifier: &ModuleSpecifier,
result: &CoverageResult,
source: &str,
script_coverage: &ScriptCoverage,
script_source: &str,
maybe_source_map: Option<Vec<u8>>,
maybe_original_source: Option<Arc<String>>,
);

fn done(&mut self);
}

pub struct LcovCoverageReporter {}
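The restored reporter interface is a plain trait-object factory: create_reporter returns a Box<dyn CoverageReporter + Send>, and the caller invokes visit_coverage once per script followed by a single done call. A stripped-down sketch of that pattern with made-up types (none of the names below are the real Deno API):

// Minimal trait-object reporter pattern, independent of the Deno types above.
trait Reporter {
  fn visit(&mut self, name: &str, hits: usize);
  fn done(&mut self);
}

struct PrettyReporter;
impl Reporter for PrettyReporter {
  fn visit(&mut self, name: &str, hits: usize) {
    println!("cover {} ... {} hits", name, hits);
  }
  fn done(&mut self) {}
}

struct LcovReporter;
impl Reporter for LcovReporter {
  fn visit(&mut self, name: &str, hits: usize) {
    println!("SF:{}\nDA:1,{}\nend_of_record", name, hits);
  }
  fn done(&mut self) {}
}

enum ReporterKind { Pretty, Lcov }

fn create_reporter(kind: ReporterKind) -> Box<dyn Reporter + Send> {
  match kind {
    ReporterKind::Pretty => Box::new(PrettyReporter),
    ReporterKind::Lcov => Box::new(LcovReporter),
  }
}

fn main() {
  let mut reporter = create_reporter(ReporterKind::Lcov);
  reporter.visit("file:///a.ts", 3);
  reporter.done();
}
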
@@ -213,45 +204,86 @@ impl LcovCoverageReporter {
}

impl CoverageReporter for LcovCoverageReporter {
fn report_result(
fn visit_coverage(
&mut self,
specifier: &ModuleSpecifier,
result: &CoverageResult,
source: &str,
script_coverage: &ScriptCoverage,
script_source: &str,
maybe_source_map: Option<Vec<u8>>,
_maybe_original_source: Option<Arc<String>>,
) {
println!("SF:{}", specifier.to_file_path().unwrap().to_str().unwrap());
// TODO(caspervonb) cleanup and reduce duplication between reporters, pre-compute line coverage
// elsewhere.
let maybe_source_map = maybe_source_map
.map(|source_map| SourceMap::from_slice(&source_map).unwrap());

let named_functions = result
.functions
.iter()
.filter(|block| !block.function_name.is_empty())
.collect::<Vec<&FunctionCoverage>>();
let url = Url::parse(&script_coverage.url).unwrap();
let file_path = url.to_file_path().unwrap();
println!("SF:{}", file_path.to_str().unwrap());

for block in &named_functions {
let index = source[0..block.ranges[0].start_offset].split('\n').count();

println!("FN:{},{}", index + 1, block.function_name);
let mut functions_found = 0;
for function in &script_coverage.functions {
if function.function_name.is_empty() {
continue;
}

let hit_functions = named_functions
.iter()
.filter(|block| block.ranges[0].count > 0)
.cloned()
.collect::<Vec<&FunctionCoverage>>();
let source_line = script_source[0..function.ranges[0].start_offset]
.split('\n')
.count();

for block in &hit_functions {
println!("FNDA:{},{}", block.ranges[0].count, block.function_name);
let line_index = if let Some(source_map) = maybe_source_map.as_ref() {
source_map
.tokens()
.find(|token| token.get_dst_line() as usize == source_line)
.map(|token| token.get_src_line() as usize)
.unwrap_or(0)
} else {
source_line
};

let function_name = &function.function_name;

println!("FN:{},{}", line_index + 1, function_name);

functions_found += 1;
}

println!("FNF:{}", named_functions.len());
println!("FNH:{}", hit_functions.len());
let mut functions_hit = 0;
for function in &script_coverage.functions {
if function.function_name.is_empty() {
continue;
}

let execution_count = function.ranges[0].count;
let function_name = &function.function_name;

println!("FNDA:{},{}", execution_count, function_name);

if execution_count != 0 {
functions_hit += 1;
}
}

println!("FNF:{}", functions_found);
println!("FNH:{}", functions_hit);

let mut branches_found = 0;
let mut branches_hit = 0;
for (block_number, block) in result.functions.iter().enumerate() {
let block_hits = block.ranges[0].count;
for (branch_number, range) in block.ranges[1..].iter().enumerate() {
let line_index = source[0..range.start_offset].split('\n').count();
for (block_number, function) in script_coverage.functions.iter().enumerate()
{
let block_hits = function.ranges[0].count;
for (branch_number, range) in function.ranges[1..].iter().enumerate() {
let source_line =
script_source[0..range.start_offset].split('\n').count();

let line_index = if let Some(source_map) = maybe_source_map.as_ref() {
source_map
.tokens()
.find(|token| token.get_dst_line() as usize == source_line)
.map(|token| token.get_src_line() as usize)
.unwrap_or(0)
} else {
source_line
};

// From https://manpages.debian.org/unstable/lcov/geninfo.1.en.html:
//
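In the restored LCOV reporter, the function records are driven directly by ScriptCoverage.functions: anonymous entries are skipped, each named function emits an FN and an FNDA line, and FNF/FNH are running totals of named functions and of those whose first (whole-function) range has a non-zero count. A simplified, self-contained sketch of that bookkeeping, with types reduced to what the counting needs:

struct Range { count: usize }
struct Function { name: String, ranges: Vec<Range> }

// Returns (functions_found, functions_hit) the way the LCOV reporter counts them:
// anonymous functions are ignored, and a function is "hit" when its first
// (whole-function) range has a non-zero execution count.
fn count_functions(functions: &[Function]) -> (usize, usize) {
  let mut found = 0;
  let mut hit = 0;
  for function in functions {
    if function.name.is_empty() {
      continue;
    }
    found += 1;
    if function.ranges[0].count != 0 {
      hit += 1;
    }
  }
  (found, hit)
}

fn main() {
  let functions = vec![
    Function { name: "dependency".into(), ranges: vec![Range { count: 1 }] },
    Function { name: "unused".into(), ranges: vec![Range { count: 0 }] },
    Function { name: "".into(), ranges: vec![Range { count: 4 }] }, // anonymous, skipped
  ];
  let (fnf, fnh) = count_functions(&functions);
  println!("FNF:{}\nFNH:{}", fnf, fnh); // FNF:2, FNH:1
}
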
@@ -286,49 +318,97 @@ impl CoverageReporter for LcovCoverageReporter {
println!("BRF:{}", branches_found);
println!("BRH:{}", branches_hit);

let enumerated_lines = result
.lines
.iter()
.enumerate()
.collect::<Vec<(usize, &LineCoverage)>>();
let lines = script_source.split('\n').collect::<Vec<_>>();
let line_offsets = {
let mut offsets: Vec<(usize, usize)> = Vec::new();
let mut index = 0;

for (index, line) in &enumerated_lines {
if line.ranges.is_empty() {
for line in &lines {
offsets.push((index, index + line.len() + 1));
index += line.len() + 1;
}

offsets
};

let line_counts = line_offsets
.iter()
.map(|(line_start_offset, line_end_offset)| {
let mut count = 0;

// Count the hits of ranges that include the entire line which will always be at-least one
// as long as the code has been evaluated.
for function in &script_coverage.functions {
for range in &function.ranges {
if range.start_offset <= *line_start_offset
&& range.end_offset >= *line_end_offset
{
count += range.count;
}
}
}

// We reset the count if any block with a zero count overlaps with the line range.
for function in &script_coverage.functions {
for range in &function.ranges {
if range.count > 0 {
continue;
}

let mut count = 0;
for range in &line.ranges {
count += range.count;
let overlaps = std::cmp::max(line_end_offset, &range.end_offset)
- std::cmp::min(line_start_offset, &range.start_offset)
< (line_end_offset - line_start_offset)
+ (range.end_offset - range.start_offset);

if range.count == 0 {
if overlaps {
count = 0;
break;
}
}
}

count
})
.collect::<Vec<usize>>();

let found_lines = if let Some(source_map) = maybe_source_map.as_ref() {
let mut found_lines = line_counts
.iter()
.enumerate()
.map(|(index, count)| {
source_map
.tokens()
.filter(move |token| token.get_dst_line() as usize == index)
.map(move |token| (token.get_src_line() as usize, *count))
})
.flatten()
.collect::<Vec<(usize, usize)>>();

found_lines.sort_unstable_by_key(|(index, _)| *index);
found_lines.dedup_by_key(|(index, _)| *index);
found_lines
} else {
line_counts
.iter()
.enumerate()
.map(|(index, count)| (index, *count))
.collect::<Vec<(usize, usize)>>()
};

for (index, count) in &found_lines {
println!("DA:{},{}", index + 1, count);
}

let lines_found = enumerated_lines
.iter()
.filter(|(_, line)| !line.ranges.is_empty())
.count();

println!("LF:{}", lines_found);

let lines_hit = enumerated_lines
.iter()
.filter(|(_, line)| {
!line.ranges.is_empty()
&& !line.ranges.iter().any(|range| range.count == 0)
})
.count();
let lines_hit = found_lines.iter().filter(|(_, count)| *count != 0).count();

println!("LH:{}", lines_hit);

let lines_found = found_lines.len();
println!("LF:{}", lines_found);

println!("end_of_record");
}

fn done(&mut self) {}
}

pub struct PrettyCoverageReporter {}
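The per-line count restored above follows a two-pass rule over byte ranges: first sum the counts of every range that fully encloses the line, then zero the line if any zero-count range merely overlaps it, so partially uncovered lines are reported as misses. A standalone sketch of that rule over plain (start, end, count) values; the helper below is illustrative only:

#[derive(Clone, Copy)]
struct Range { start: usize, end: usize, count: usize }

// Count hits for a line spanning [line_start, line_end): add counts of ranges
// that fully enclose the line, then reset to zero if any zero-count range
// overlaps the line at all.
fn line_count(line_start: usize, line_end: usize, ranges: &[Range]) -> usize {
  let mut count = 0;
  for r in ranges {
    if r.start <= line_start && r.end >= line_end {
      count += r.count;
    }
  }
  for r in ranges {
    if r.count > 0 {
      continue;
    }
    // Two intervals overlap when the span of their union is shorter than the
    // sum of their lengths (the same inequality used in the reporter above).
    let overlaps = line_end.max(r.end) - line_start.min(r.start)
      < (line_end - line_start) + (r.end - r.start);
    if overlaps {
      count = 0;
    }
  }
  count
}

fn main() {
  let ranges = [
    Range { start: 0, end: 100, count: 1 }, // whole script executed once
    Range { start: 40, end: 60, count: 0 }, // an uncovered block
  ];
  println!("{}", line_count(10, 20, &ranges)); // 1: fully covered line
  println!("{}", line_count(45, 55, &ranges)); // 0: overlapped by a zero-count block
}
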
@@ -339,45 +419,135 @@ impl PrettyCoverageReporter {
}
}

const PRETTY_LINE_WIDTH: usize = 4;
const PRETTY_LINE_SEPERATOR: &str = "|";
impl CoverageReporter for PrettyCoverageReporter {
fn report_result(
fn visit_coverage(
&mut self,
specifier: &ModuleSpecifier,
result: &CoverageResult,
source: &str,
script_coverage: &ScriptCoverage,
script_source: &str,
maybe_source_map: Option<Vec<u8>>,
maybe_original_source: Option<Arc<String>>,
) {
print!("cover {} ... ", specifier);
let maybe_source_map = maybe_source_map
.map(|source_map| SourceMap::from_slice(&source_map).unwrap());

let enumerated_lines = result
.lines
let mut ignored_spans: Vec<Span> = Vec::new();
for item in deno_ast::lex(script_source, MediaType::JavaScript) {
if let deno_ast::TokenOrComment::Token(_) = item.inner {
continue;
}

ignored_spans.push(item.span);
}

let lines = script_source.split('\n').collect::<Vec<_>>();

let line_offsets = {
let mut offsets: Vec<(usize, usize)> = Vec::new();
let mut index = 0;

for line in &lines {
offsets.push((index, index + line.len() + 1));
index += line.len() + 1;
}

offsets
};

// TODO(caspervonb): collect uncovered ranges on the lines so that we can highlight specific
// parts of a line in color (word diff style) instead of the entire line.
let line_counts = line_offsets
.iter()
.enumerate()
.collect::<Vec<(usize, &LineCoverage)>>();
.map(|(index, (line_start_offset, line_end_offset))| {
let ignore = ignored_spans.iter().any(|span| {
(span.lo.0 as usize) <= *line_start_offset
&& (span.hi.0 as usize) >= *line_end_offset
});

let found_lines = enumerated_lines
.iter()
.filter(|(_, coverage)| !coverage.ranges.is_empty())
.cloned()
.collect::<Vec<(usize, &LineCoverage)>>();
if ignore {
return (index, 1);
}

let missed_lines = found_lines
.iter()
.filter(|(_, coverage)| {
coverage.ranges.iter().any(|range| range.count == 0)
let mut count = 0;

// Count the hits of ranges that include the entire line which will always be at-least one
// as long as the code has been evaluated.
for function in &script_coverage.functions {
for range in &function.ranges {
if range.start_offset <= *line_start_offset
&& range.end_offset >= *line_end_offset
{
count += range.count;
}
}
}

// We reset the count if any block with a zero count overlaps with the line range.
for function in &script_coverage.functions {
for range in &function.ranges {
if range.count > 0 {
continue;
}

let overlaps = std::cmp::max(line_end_offset, &range.end_offset)
- std::cmp::min(line_start_offset, &range.start_offset)
< (line_end_offset - line_start_offset)
+ (range.end_offset - range.start_offset);

if overlaps {
count = 0;
}
}
}

(index, count)
})
.cloned()
.collect::<Vec<(usize, &LineCoverage)>>();
.collect::<Vec<(usize, usize)>>();

let line_ratio = (found_lines.len() - missed_lines.len()) as f32
/ found_lines.len() as f32;
let line_coverage = format!(
"{:.3}% ({}/{})",
line_ratio * 100.0,
found_lines.len() - missed_lines.len(),
found_lines.len()
);
let lines = if let Some(original_source) = maybe_original_source.as_ref() {
original_source.split('\n').collect::<Vec<_>>()
} else {
lines
};

let line_counts = if let Some(source_map) = maybe_source_map.as_ref() {
let mut line_counts = line_counts
.iter()
.map(|(index, count)| {
source_map
.tokens()
.filter(move |token| token.get_dst_line() as usize == *index)
.map(move |token| (token.get_src_line() as usize, *count))
})
.flatten()
.collect::<Vec<(usize, usize)>>();

line_counts.sort_unstable_by_key(|(index, _)| *index);
line_counts.dedup_by_key(|(index, _)| *index);

line_counts
} else {
line_counts
};

print!("cover {} ... ", script_coverage.url);

let hit_lines = line_counts
.iter()
.filter(|(_, count)| *count != 0)
.map(|(index, _)| *index);

let missed_lines = line_counts
.iter()
.filter(|(_, count)| *count == 0)
.map(|(index, _)| *index);

let lines_found = line_counts.len();
let lines_hit = hit_lines.count();
let line_ratio = lines_hit as f32 / lines_found as f32;

let line_coverage =
format!("{:.3}% ({}/{})", line_ratio * 100.0, lines_hit, lines_found,);

if line_ratio >= 0.9 {
println!("{}", colors::green(&line_coverage));
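When the module was transpiled, both reporters translate generated (dst) line indexes back to original (src) lines by scanning source map tokens, then sort and dedup by source line. A self-contained sketch of that translation using the same sourcemap crate; the inline source map below is a hand-written example, not one of the test fixtures:

use sourcemap::SourceMap;

fn main() {
  // A tiny hand-written source map: generated line 0 comes from original line 2,
  // generated line 1 comes from original line 0 (purely illustrative).
  let raw = br#"{
    "version": 3,
    "sources": ["original.ts"],
    "names": [],
    "mappings": "AAEA;AAFA"
  }"#;
  let source_map = SourceMap::from_slice(raw).unwrap();

  // Generated (dst) line index -> execution count, as produced by the reporters.
  let line_counts: Vec<(usize, usize)> = vec![(0, 3), (1, 0)];

  // Translate each generated line to the original line of the first token on it,
  // then sort and dedup by original line, mirroring the reverted reporters.
  let mut found_lines = line_counts
    .iter()
    .flat_map(|(index, count)| {
      source_map
        .tokens()
        .filter(move |token| token.get_dst_line() as usize == *index)
        .map(move |token| (token.get_src_line() as usize, *count))
    })
    .collect::<Vec<(usize, usize)>>();
  found_lines.sort_unstable_by_key(|(index, _)| *index);
  found_lines.dedup_by_key(|(index, _)| *index);

  for (index, count) in &found_lines {
    println!("DA:{},{}", index + 1, count);
  }
}
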
@@ -387,31 +557,35 @@ impl CoverageReporter for PrettyCoverageReporter {
println!("{}", colors::red(&line_coverage));
}

let mut maybe_last_index = None;
for (index, line) in missed_lines {
if let Some(last_index) = maybe_last_index {
if last_index + 1 != index {
let dash = colors::gray("-".repeat(PRETTY_LINE_WIDTH + 1));
println!("{}{}{}", dash, colors::gray(PRETTY_LINE_SEPERATOR), dash);
}
}
let mut last_line = None;
for line_index in missed_lines {
const WIDTH: usize = 4;
const SEPERATOR: &str = "|";

let slice = &source[line.start_offset..line.end_offset];
// Put a horizontal separator between disjoint runs of lines
if let Some(last_line) = last_line {
if last_line + 1 != line_index {
let dash = colors::gray("-".repeat(WIDTH + 1));
println!("{}{}{}", dash, colors::gray(SEPERATOR), dash);
}
}

println!(
"{:width$} {} {}",
index + 1,
colors::gray(PRETTY_LINE_SEPERATOR),
colors::red(&slice),
width = PRETTY_LINE_WIDTH,
line_index + 1,
colors::gray(SEPERATOR),
colors::red(&lines[line_index]),
width = WIDTH
);

maybe_last_index = Some(index);
}
last_line = Some(line_index);
}
}

fn collect_script_coverages(
fn done(&mut self) {}
}

fn collect_coverages(
files: Vec<PathBuf>,
ignore: Vec<PathBuf>,
) -> Result<Vec<ScriptCoverage>, AnyError> {
@@ -462,7 +636,7 @@ fn collect_script_coverages(
Ok(coverages)
}

fn filter_script_coverages(
fn filter_coverages(
coverages: Vec<ScriptCoverage>,
include: Vec<String>,
exclude: Vec<String>,
@@ -488,321 +662,7 @@ fn filter_script_coverages(
.collect::<Vec<ScriptCoverage>>()
}

fn offset_to_line_col(source: &str, offset: usize) -> Option<(u32, u32)> {
let mut line = 0;
let mut col = 0;

if let Some(slice) = source.get(0..offset) {
for ch in slice.bytes() {
if ch == b'\n' {
line += 1;
col = 0;
} else {
col += 1;
}
}

return Some((line, col));
}

None
}

fn line_col_to_offset(source: &str, line: u32, col: u32) -> Option<usize> {
let mut current_col = 0;
let mut current_line = 0;

for (i, ch) in source.bytes().enumerate() {
if current_line == line && current_col == col {
return Some(i);
}

if ch == b'\n' {
current_line += 1;
current_col = 0;
} else {
current_col += 1;
}
}

None
}

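The two helpers deleted by this hunk are plain byte-offset conversions: offset_to_line_col walks the source counting newlines up to an offset, and line_col_to_offset is the inverse. A standalone copy with a small round-trip check; the main function and sample source exist only for illustration:

fn offset_to_line_col(source: &str, offset: usize) -> Option<(u32, u32)> {
  // Walk the bytes before `offset`, counting newlines.
  let slice = source.get(0..offset)?;
  let mut line = 0;
  let mut col = 0;
  for ch in slice.bytes() {
    if ch == b'\n' {
      line += 1;
      col = 0;
    } else {
      col += 1;
    }
  }
  Some((line, col))
}

fn line_col_to_offset(source: &str, line: u32, col: u32) -> Option<usize> {
  // Walk forward until the running (line, col) position matches.
  let mut current_line = 0;
  let mut current_col = 0;
  for (i, ch) in source.bytes().enumerate() {
    if current_line == line && current_col == col {
      return Some(i);
    }
    if ch == b'\n' {
      current_line += 1;
      current_col = 0;
    } else {
      current_col += 1;
    }
  }
  None
}

fn main() {
  let source = "const a = 1;\nconst b = 2;\n";
  // Offset 19 is the 'b' on the second line (0-based line 1, column 6).
  assert_eq!(offset_to_line_col(source, 19), Some((1, 6)));
  assert_eq!(line_col_to_offset(source, 1, 6), Some(19));
  println!("round trip ok");
}
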
async fn cover_script(
program_state: ProcState,
script: ScriptCoverage,
) -> Result<CoverageResult, AnyError> {
let module_specifier = resolve_url_or_path(&script.url)?;
let file = program_state
.file_fetcher
.fetch(&module_specifier, &mut Permissions::allow_all())
.await?;

let source = file.source.as_str();

let line_offsets = {
let mut line_offsets: Vec<(usize, usize)> = Vec::new();
let mut offset = 0;

for line in source.split('\n') {
line_offsets.push((offset, offset + line.len()));
offset += line.len() + 1;
}

line_offsets
};

program_state
.prepare_module_load(
module_specifier.clone(),
TypeLib::UnstableDenoWindow,
Permissions::allow_all(),
Permissions::allow_all(),
false,
program_state.maybe_import_map.clone(),
)
.await?;

let compiled_source =
program_state.load(module_specifier.clone(), None)?.code;

// TODO(caspervonb): source mapping is still a bit of a mess and we should try look into avoiding
// doing any loads at this stage of execution but it'll do for now.
let maybe_raw_source_map = program_state.get_source_map(&script.url);
if let Some(raw_source_map) = maybe_raw_source_map {
let source_map = SourceMap::from_slice(&raw_source_map)?;

// To avoid false positives we base our line ranges on the ranges of the compiled lines
let compiled_line_offsets = {
let mut line_offsets: Vec<(usize, usize)> = Vec::new();
let mut offset = 0;

for line in compiled_source.split('\n') {
line_offsets.push((offset, offset + line.len()));
offset += line.len() + 1;
}

line_offsets
};

// First we get the adjusted ranges of these lines
let compiled_line_ranges = compiled_line_offsets
.iter()
.filter_map(|(start_offset, end_offset)| {
// We completely ignore empty lines, they just cause trouble and can't map to anything
// meaningful.
let line = &compiled_source[*start_offset..*end_offset];
if line == "\n" {
return None;
}

let ranges = script
.functions
.iter()
.map(|function| {
function.ranges.iter().filter_map(|function_range| {
if &function_range.start_offset > end_offset {
return None;
}

if &function_range.end_offset < start_offset {
return None;
}

Some(CoverageRange {
start_offset: cmp::max(
*start_offset,
function_range.start_offset,
),
end_offset: cmp::min(*end_offset, function_range.end_offset),
count: function_range.count,
})
})
})
.flatten()
.collect::<Vec<CoverageRange>>();

Some(ranges)
})
.flatten()
.collect::<Vec<CoverageRange>>();

// Then we map those adjusted ranges from their closest tokens to their source locations.
let mapped_line_ranges = compiled_line_ranges
.iter()
.map(|line_range| {
let (start_line, start_col) =
offset_to_line_col(&compiled_source, line_range.start_offset)
.unwrap();

let start_token =
source_map.lookup_token(start_line, start_col).unwrap();

let (end_line, end_col) =
offset_to_line_col(&compiled_source, line_range.end_offset).unwrap();

let end_token = source_map.lookup_token(end_line, end_col).unwrap();

let mapped_start_offset = line_col_to_offset(
source,
start_token.get_src_line(),
start_token.get_src_col(),
)
.unwrap();

let mapped_end_offset = line_col_to_offset(
source,
end_token.get_src_line(),
end_token.get_src_col(),
)
.unwrap();

CoverageRange {
start_offset: mapped_start_offset,
end_offset: mapped_end_offset,
count: line_range.count,
}
})
.collect::<Vec<CoverageRange>>();

// Then we go through the source lines and grab any ranges that apply to any given line
// adjusting them as we go.
let lines = line_offsets
.iter()
.map(|(start_offset, end_offset)| {
let ranges = mapped_line_ranges
.iter()
.filter_map(|line_range| {
if &line_range.start_offset > end_offset {
return None;
}

if &line_range.end_offset < start_offset {
return None;
}

Some(CoverageRange {
start_offset: cmp::max(*start_offset, line_range.start_offset),
end_offset: cmp::min(*end_offset, line_range.end_offset),
count: line_range.count,
})
})
.collect();

LineCoverage {
start_offset: *start_offset,
end_offset: *end_offset,
ranges,
}
})
.collect();

let functions = script
.functions
.iter()
.map(|function| {
let ranges = function
.ranges
.iter()
.map(|function_range| {
let (start_line, start_col) =
offset_to_line_col(&compiled_source, function_range.start_offset)
.unwrap();

let start_token =
source_map.lookup_token(start_line, start_col).unwrap();

let mapped_start_offset = line_col_to_offset(
source,
start_token.get_src_line(),
start_token.get_src_col(),
)
.unwrap();

let (end_line, end_col) =
offset_to_line_col(&compiled_source, function_range.end_offset)
.unwrap();

let end_token = source_map.lookup_token(end_line, end_col).unwrap();

let mapped_end_offset = line_col_to_offset(
source,
end_token.get_src_line(),
end_token.get_src_col(),
)
.unwrap();

CoverageRange {
start_offset: mapped_start_offset,
end_offset: mapped_end_offset,
count: function_range.count,
}
})
.collect();

FunctionCoverage {
ranges,
is_block_coverage: function.is_block_coverage,
function_name: function.function_name.clone(),
}
})
.collect::<Vec<FunctionCoverage>>();

return Ok(CoverageResult { lines, functions });
}

let functions = script.functions.clone();

let lines = line_offsets
.iter()
.map(|(start_offset, end_offset)| {
let line = &source[*start_offset..*end_offset];
if line == "\n" {
return LineCoverage {
start_offset: *start_offset,
end_offset: *end_offset,
ranges: Vec::new(),
};
}

let ranges = script
.functions
.iter()
.map(|function| {
function.ranges.iter().filter_map(|function_range| {
if &function_range.start_offset > end_offset {
return None;
}

if &function_range.end_offset < start_offset {
return None;
}

Some(CoverageRange {
start_offset: cmp::max(
*start_offset,
function_range.start_offset,
),
end_offset: cmp::min(*end_offset, function_range.end_offset),
count: function_range.count,
})
})
})
.flatten()
.collect();

LineCoverage {
start_offset: *start_offset,
end_offset: *end_offset,
ranges,
}
})
.collect();

Ok(CoverageResult { lines, functions })
}

pub async fn cover_scripts(
pub async fn cover_files(
flags: Flags,
files: Vec<PathBuf>,
ignore: Vec<PathBuf>,
@@ -812,9 +672,8 @@ pub async fn cover_scripts(
) -> Result<(), AnyError> {
let ps = ProcState::build(flags).await?;

let script_coverages = collect_script_coverages(files, ignore)?;
let script_coverages =
filter_script_coverages(script_coverages, include, exclude);
let script_coverages = collect_coverages(files, ignore)?;
let script_coverages = filter_coverages(script_coverages, include, exclude);

let reporter_kind = if lcov {
CoverageReporterKind::Lcov
@@ -825,16 +684,36 @@ pub async fn cover_scripts(
let mut reporter = create_reporter(reporter_kind);

for script_coverage in script_coverages {
let result = cover_script(ps.clone(), script_coverage.clone()).await?;

let module_specifier = resolve_url_or_path(&script_coverage.url)?;
let file = ps
.file_fetcher
.fetch(&module_specifier, &mut Permissions::allow_all())
let module_specifier =
deno_core::resolve_url_or_path(&script_coverage.url)?;
ps.prepare_module_load(
module_specifier.clone(),
TypeLib::UnstableDenoWindow,
Permissions::allow_all(),
Permissions::allow_all(),
false,
ps.maybe_import_map.clone(),
)
.await?;

reporter.report_result(&module_specifier, &result, &file.source);
let module_source = ps.load(module_specifier.clone(), None)?;
let script_source = &module_source.code;

let maybe_source_map = ps.get_source_map(&script_coverage.url);
let maybe_cached_source = ps
.file_fetcher
.get_source(&module_specifier)
.map(|f| f.source);

reporter.visit_coverage(
&script_coverage,
script_source,
maybe_source_map,
maybe_cached_source,
);
}

reporter.done();

Ok(())
}
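Taken together, the restored cover_files drives one visit_coverage call per collected script, and the reporters turn that into either the pretty percentages or the LCOV records shown in the fixtures above. As a closing illustration, a small standalone parser that recomputes the pretty-style ratio from an LCOV record; this consumer is hypothetical and not part of the commit:

// Recompute a pretty-reporter style "LH/LF" ratio from LCOV text, e.g. the
// expected fixtures above. Purely illustrative.
fn line_ratio(lcov: &str) -> Option<(usize, usize, f32)> {
  let mut lines_hit = None;
  let mut lines_found = None;
  for line in lcov.lines() {
    if let Some(value) = line.strip_prefix("LH:") {
      lines_hit = value.trim().parse::<usize>().ok();
    } else if let Some(value) = line.strip_prefix("LF:") {
      lines_found = value.trim().parse::<usize>().ok();
    }
  }
  let (hit, found) = (lines_hit?, lines_found?);
  Some((hit, found, hit as f32 / found as f32 * 100.0))
}

fn main() {
  let record = "SF:complex.ts\nLH:22\nLF:37\nend_of_record\n";
  let (hit, found, ratio) = line_ratio(record).unwrap();
  println!("cover complex.ts ... {:.3}% ({}/{})", ratio, hit, found); // 59.459% (22/37)
}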