
fix(lsp): lsp should respect include/exclude files in format config (#12876)

David Sherret, 2021-11-24 15:14:19 -05:00 (committed by GitHub)
parent 88000b5feb
commit adc5974333
11 changed files with 391 additions and 148 deletions

View file

@@ -1,8 +1,11 @@
 // Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
 
 use crate::fs_util::canonicalize_path;
+use crate::fs_util::specifier_parent;
+use crate::fs_util::specifier_to_file_path;
 use deno_core::anyhow::anyhow;
+use deno_core::anyhow::bail;
 use deno_core::anyhow::Context;
 use deno_core::error::custom_error;
 use deno_core::error::AnyError;
@@ -17,7 +20,6 @@ use std::collections::BTreeMap;
 use std::collections::HashMap;
 use std::fmt;
 use std::path::Path;
-use std::path::PathBuf;
 
 pub(crate) type MaybeImportsResult =
   Result<Option<Vec<(ModuleSpecifier, Vec<String>)>>, AnyError>;
@@ -54,15 +56,15 @@ pub struct CompilerOptions {
 #[derive(Debug, Clone, PartialEq)]
 pub struct IgnoredCompilerOptions {
   pub items: Vec<String>,
-  pub maybe_path: Option<PathBuf>,
+  pub maybe_specifier: Option<ModuleSpecifier>,
 }
 
 impl fmt::Display for IgnoredCompilerOptions {
   fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
     let mut codes = self.items.clone();
     codes.sort();
-    if let Some(path) = &self.maybe_path {
-      write!(f, "Unsupported compiler options in \"{}\".\n The following options were ignored:\n {}", path.to_string_lossy(), codes.join(", "))
+    if let Some(specifier) = &self.maybe_specifier {
+      write!(f, "Unsupported compiler options in \"{}\".\n The following options were ignored:\n {}", specifier, codes.join(", "))
     } else {
       write!(f, "Unsupported compiler options provided.\n The following options were ignored:\n {}", codes.join(", "))
     }
@@ -169,7 +171,7 @@ pub fn json_merge(a: &mut Value, b: &Value) {
 fn parse_compiler_options(
   compiler_options: &HashMap<String, Value>,
-  maybe_path: Option<PathBuf>,
+  maybe_specifier: Option<ModuleSpecifier>,
   is_runtime: bool,
 ) -> Result<(Value, Option<IgnoredCompilerOptions>), AnyError> {
   let mut filtered: HashMap<String, Value> = HashMap::new();
@@ -187,7 +189,10 @@ fn parse_compiler_options(
   }
   let value = serde_json::to_value(filtered)?;
   let maybe_ignored_options = if !items.is_empty() {
-    Some(IgnoredCompilerOptions { items, maybe_path })
+    Some(IgnoredCompilerOptions {
+      items,
+      maybe_specifier,
+    })
   } else {
     None
   };
@@ -292,27 +297,53 @@ struct SerializedFilesConfig {
 }
 
 impl SerializedFilesConfig {
-  pub fn into_resolved(self, config_file_path: &Path) -> FilesConfig {
-    let config_dir = config_file_path.parent().unwrap();
-    FilesConfig {
+  pub fn into_resolved(
+    self,
+    config_file_specifier: &ModuleSpecifier,
+  ) -> Result<FilesConfig, AnyError> {
+    let config_dir = specifier_parent(config_file_specifier);
+    Ok(FilesConfig {
       include: self
         .include
        .into_iter()
-        .map(|p| config_dir.join(p))
-        .collect(),
+        .map(|p| config_dir.join(&p))
+        .collect::<Result<Vec<ModuleSpecifier>, _>>()?,
       exclude: self
        .exclude
        .into_iter()
-        .map(|p| config_dir.join(p))
-        .collect(),
-    }
+        .map(|p| config_dir.join(&p))
+        .collect::<Result<Vec<ModuleSpecifier>, _>>()?,
+    })
   }
 }
 
 #[derive(Clone, Debug, Default)]
 pub struct FilesConfig {
-  pub include: Vec<PathBuf>,
-  pub exclude: Vec<PathBuf>,
+  pub include: Vec<ModuleSpecifier>,
+  pub exclude: Vec<ModuleSpecifier>,
+}
+
+impl FilesConfig {
+  /// Gets if the provided specifier is allowed based on the includes
+  /// and excludes in the configuration file.
+  pub fn matches_specifier(&self, specifier: &ModuleSpecifier) -> bool {
+    // Skip files which is in the exclude list.
+    let specifier_text = specifier.as_str();
+    if self
+      .exclude
+      .iter()
+      .any(|i| specifier_text.starts_with(i.as_str()))
+    {
+      return false;
+    }
+
+    // Ignore files not in the include list if it's not empty.
+    self.include.is_empty()
+      || self
+        .include
+        .iter()
+        .any(|i| specifier_text.starts_with(i.as_str()))
+  }
 }
 
 #[derive(Clone, Debug, Default, Deserialize)]
@@ -323,11 +354,14 @@ struct SerializedLintConfig {
 }
 
 impl SerializedLintConfig {
-  pub fn into_resolved(self, config_file_path: &Path) -> LintConfig {
-    LintConfig {
+  pub fn into_resolved(
+    self,
+    config_file_specifier: &ModuleSpecifier,
+  ) -> Result<LintConfig, AnyError> {
+    Ok(LintConfig {
       rules: self.rules,
-      files: self.files.into_resolved(config_file_path),
-    }
+      files: self.files.into_resolved(config_file_specifier)?,
+    })
   }
 }
@@ -363,11 +397,14 @@ struct SerializedFmtConfig {
 }
 
 impl SerializedFmtConfig {
-  pub fn into_resolved(self, config_file_path: &Path) -> FmtConfig {
-    FmtConfig {
+  pub fn into_resolved(
+    self,
+    config_file_specifier: &ModuleSpecifier,
+  ) -> Result<FmtConfig, AnyError> {
+    Ok(FmtConfig {
      options: self.options,
-      files: self.files.into_resolved(config_file_path),
-    }
+      files: self.files.into_resolved(config_file_specifier)?,
+    })
   }
 }
@@ -387,7 +424,7 @@ pub struct ConfigFileJson {
 #[derive(Clone, Debug)]
 pub struct ConfigFile {
-  pub path: PathBuf,
+  pub specifier: ModuleSpecifier,
   pub json: ConfigFileJson,
 }
@@ -409,24 +446,46 @@ impl ConfigFile {
         ),
       )
     })?;
-    let config_text = std::fs::read_to_string(config_path.clone())?;
-    Self::new(&config_text, &config_path)
+    let config_specifier = ModuleSpecifier::from_file_path(&config_path)
+      .map_err(|_| {
+        anyhow!(
+          "Could not convert path to specifier. Path: {}",
+          config_path.display()
+        )
+      })?;
+    Self::from_specifier(&config_specifier)
   }
 
-  pub fn new(text: &str, path: &Path) -> Result<Self, AnyError> {
+  pub fn from_specifier(specifier: &ModuleSpecifier) -> Result<Self, AnyError> {
+    let config_path = specifier_to_file_path(specifier)?;
+    let config_text = match std::fs::read_to_string(&config_path) {
+      Ok(text) => text,
+      Err(err) => bail!(
+        "Error reading config file {}: {}",
+        specifier,
+        err.to_string()
+      ),
+    };
+    Self::new(&config_text, specifier)
+  }
+
+  pub fn new(
+    text: &str,
+    specifier: &ModuleSpecifier,
+  ) -> Result<Self, AnyError> {
     let jsonc = match jsonc_parser::parse_to_serde_value(text) {
       Ok(None) => json!({}),
       Ok(Some(value)) if value.is_object() => value,
       Ok(Some(_)) => {
         return Err(anyhow!(
           "config file JSON {:?} should be an object",
-          path.to_str().unwrap()
+          specifier,
         ))
       }
       Err(e) => {
         return Err(anyhow!(
           "Unable to parse config file JSON {:?} because of {}",
-          path.to_str().unwrap(),
+          specifier,
           e.to_string()
         ))
       }
@@ -434,7 +493,7 @@ impl ConfigFile {
     let json: ConfigFileJson = serde_json::from_value(jsonc)?;
 
     Ok(Self {
-      path: path.to_owned(),
+      specifier: specifier.to_owned(),
       json,
     })
   }
@@ -448,7 +507,7 @@ impl ConfigFile {
       let options: HashMap<String, Value> =
         serde_json::from_value(compiler_options)
          .context("compilerOptions should be an object")?;
-      parse_compiler_options(&options, Some(self.path.to_owned()), false)
+      parse_compiler_options(&options, Some(self.specifier.to_owned()), false)
     } else {
       Ok((json!({}), None))
     }
@@ -458,7 +517,7 @@ impl ConfigFile {
     if let Some(config) = self.json.lint.clone() {
       let lint_config: SerializedLintConfig = serde_json::from_value(config)
        .context("Failed to parse \"lint\" configuration")?;
-      Ok(Some(lint_config.into_resolved(&self.path)))
+      Ok(Some(lint_config.into_resolved(&self.specifier)?))
     } else {
       Ok(None)
     }
@@ -476,8 +535,6 @@ impl ConfigFile {
     };
     let compiler_options: CompilerOptions =
       serde_json::from_value(compiler_options_value.clone())?;
-    let referrer = ModuleSpecifier::from_file_path(&self.path)
-      .map_err(|_| custom_error("TypeError", "bad config file specifier"))?;
     if let Some(types) = compiler_options.types {
       imports.extend(types);
     }
@@ -493,6 +550,7 @@ impl ConfigFile {
       ));
     }
     if !imports.is_empty() {
+      let referrer = self.specifier.clone();
       Ok(Some(vec![(referrer, imports)]))
     } else {
       Ok(None)
@@ -516,7 +574,7 @@ impl ConfigFile {
     if let Some(config) = self.json.fmt.clone() {
       let fmt_config: SerializedFmtConfig = serde_json::from_value(config)
        .context("Failed to parse \"fmt\" configuration")?;
-      Ok(Some(fmt_config.into_resolved(&self.path)))
+      Ok(Some(fmt_config.into_resolved(&self.specifier)?))
     } else {
       Ok(None)
     }
@@ -603,9 +661,9 @@ mod tests {
         }
       }
     }"#;
-    let config_dir = PathBuf::from("/deno");
-    let config_path = config_dir.join("tsconfig.json");
-    let config_file = ConfigFile::new(config_text, &config_path).unwrap();
+    let config_dir = ModuleSpecifier::parse("file:///deno/").unwrap();
+    let config_specifier = config_dir.join("tsconfig.json").unwrap();
+    let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
     let (options_value, ignored) =
       config_file.to_compiler_options().expect("error parsing");
     assert!(options_value.is_object());
@@ -616,7 +674,7 @@ mod tests {
      ignored,
      Some(IgnoredCompilerOptions {
        items: vec!["build".to_string()],
-        maybe_path: Some(config_path),
+        maybe_specifier: Some(config_specifier),
      }),
    );
@@ -624,10 +682,13 @@ mod tests {
      .to_lint_config()
      .expect("error parsing lint object")
      .expect("lint object should be defined");
-    assert_eq!(lint_config.files.include, vec![config_dir.join("src")]);
+    assert_eq!(
+      lint_config.files.include,
+      vec![config_dir.join("src/").unwrap()]
+    );
     assert_eq!(
       lint_config.files.exclude,
-      vec![config_dir.join("src/testdata/")]
+      vec![config_dir.join("src/testdata/").unwrap()]
     );
     assert_eq!(
       lint_config.rules.include,
@@ -643,10 +704,13 @@ mod tests {
      .to_fmt_config()
      .expect("error parsing fmt object")
      .expect("fmt object should be defined");
-    assert_eq!(fmt_config.files.include, vec![config_dir.join("src")]);
+    assert_eq!(
+      fmt_config.files.include,
+      vec![config_dir.join("src/").unwrap()]
+    );
     assert_eq!(
       fmt_config.files.exclude,
-      vec![config_dir.join("src/testdata/")]
+      vec![config_dir.join("src/testdata/").unwrap()]
     );
     assert_eq!(fmt_config.options.use_tabs, Some(true));
     assert_eq!(fmt_config.options.line_width, Some(80));
@@ -657,8 +721,9 @@ mod tests {
   #[test]
   fn test_parse_config_with_empty_file() {
     let config_text = "";
-    let config_path = PathBuf::from("/deno/tsconfig.json");
-    let config_file = ConfigFile::new(config_text, &config_path).unwrap();
+    let config_specifier =
+      ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
+    let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
     let (options_value, _) =
       config_file.to_compiler_options().expect("error parsing");
     assert!(options_value.is_object());
@@ -667,8 +732,9 @@ mod tests {
   #[test]
   fn test_parse_config_with_commented_file() {
     let config_text = r#"//{"foo":"bar"}"#;
-    let config_path = PathBuf::from("/deno/tsconfig.json");
-    let config_file = ConfigFile::new(config_text, &config_path).unwrap();
+    let config_specifier =
+      ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
+    let config_file = ConfigFile::new(config_text, &config_specifier).unwrap();
     let (options_value, _) =
       config_file.to_compiler_options().expect("error parsing");
     assert!(options_value.is_object());
@@ -677,17 +743,19 @@ mod tests {
   #[test]
   fn test_parse_config_with_invalid_file() {
     let config_text = "{foo:bar}";
-    let config_path = PathBuf::from("/deno/tsconfig.json");
+    let config_specifier =
+      ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
     // Emit error: Unable to parse config file JSON "<config_path>" because of Unexpected token on line 1 column 6.
-    assert!(ConfigFile::new(config_text, &config_path).is_err());
+    assert!(ConfigFile::new(config_text, &config_specifier).is_err());
   }
 
   #[test]
   fn test_parse_config_with_not_object_file() {
     let config_text = "[]";
-    let config_path = PathBuf::from("/deno/tsconfig.json");
+    let config_specifier =
+      ModuleSpecifier::parse("file:///deno/tsconfig.json").unwrap();
     // Emit error: config file JSON "<config_path>" should be an object
-    assert!(ConfigFile::new(config_text, &config_path).is_err());
+    assert!(ConfigFile::new(config_text, &config_specifier).is_err());
   }
 
   #[test]
@@ -717,7 +785,7 @@ mod tests {
       maybe_ignored_options,
       Some(IgnoredCompilerOptions {
        items: vec!["build".to_string()],
-        maybe_path: None
+        maybe_specifier: None
      })
    );
  }
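
Note on the new FilesConfig::matches_specifier above: it is plain string-prefix matching on specifier URLs, with exclude entries winning over include entries and an empty include list meaning "allow everything that is not excluded". A minimal standalone sketch of that logic (not the Deno code itself; it assumes only the url crate, which is what ModuleSpecifier re-exports):

// Illustrative sketch of the prefix-based allow check (assumes the `url` crate).
use url::Url;

fn matches(include: &[Url], exclude: &[Url], specifier: &Url) -> bool {
  let text = specifier.as_str();
  // An exclude entry always wins.
  if exclude.iter().any(|i| text.starts_with(i.as_str())) {
    return false;
  }
  // An empty include list means "everything not excluded".
  include.is_empty() || include.iter().any(|i| text.starts_with(i.as_str()))
}

fn main() {
  let include = vec![Url::parse("file:///project/src/").unwrap()];
  let exclude = vec![Url::parse("file:///project/src/testdata/").unwrap()];
  let a = Url::parse("file:///project/src/main.ts").unwrap();
  let b = Url::parse("file:///project/src/testdata/x.ts").unwrap();
  assert!(matches(&include, &exclude, &a)); // in include, not excluded
  assert!(!matches(&include, &exclude, &b)); // excluded prefix wins
  println!("prefix matching behaves as expected");
}

Because the match is a raw prefix check, directory entries only behave as intended when they end with a trailing slash, which is why the updated tests expect "src/" and "src/testdata/" in the resolved include/exclude lists.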

View file

@@ -350,6 +350,37 @@ pub fn specifier_to_file_path(
   }
 }
 
+/// Ensures a specifier that will definitely be a directory has a trailing slash.
+pub fn ensure_directory_specifier(
+  mut specifier: ModuleSpecifier,
+) -> ModuleSpecifier {
+  let path = specifier.path();
+  if !path.ends_with('/') {
+    let new_path = format!("{}/", path);
+    specifier.set_path(&new_path);
+  }
+  specifier
+}
+
+/// Gets the parent of this module specifier.
+pub fn specifier_parent(specifier: &ModuleSpecifier) -> ModuleSpecifier {
+  let mut specifier = specifier.clone();
+  // don't use specifier.segments() because it will strip the leading slash
+  let mut segments = specifier.path().split('/').collect::<Vec<_>>();
+  if segments.iter().all(|s| s.is_empty()) {
+    return specifier;
+  }
+  if let Some(last) = segments.last() {
+    if last.is_empty() {
+      segments.pop();
+    }
+    segments.pop();
+    let new_path = format!("{}/", segments.join("/"));
+    specifier.set_path(&new_path);
+  }
+  specifier
+}
+
 #[cfg(test)]
 mod tests {
   use super::*;
@@ -688,4 +719,35 @@ mod tests {
       assert_eq!(result, PathBuf::from(expected_path));
     }
   }
+
+  #[test]
+  fn test_ensure_directory_specifier() {
+    run_test("file:///", "file:///");
+    run_test("file:///test", "file:///test/");
+    run_test("file:///test/", "file:///test/");
+    run_test("file:///test/other", "file:///test/other/");
+    run_test("file:///test/other/", "file:///test/other/");
+
+    fn run_test(specifier: &str, expected: &str) {
+      let result =
+        ensure_directory_specifier(ModuleSpecifier::parse(specifier).unwrap());
+      assert_eq!(result.to_string(), expected);
+    }
+  }
+
+  #[test]
+  fn test_specifier_parent() {
+    run_test("file:///", "file:///");
+    run_test("file:///test", "file:///");
+    run_test("file:///test/", "file:///");
+    run_test("file:///test/other", "file:///test/");
+    run_test("file:///test/other.txt", "file:///test/");
+    run_test("file:///test/other/", "file:///test/");
+
+    fn run_test(specifier: &str, expected: &str) {
+      let result =
+        specifier_parent(&ModuleSpecifier::parse(specifier).unwrap());
+      assert_eq!(result.to_string(), expected);
+    }
+  }
 }
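
The trailing slash that ensure_directory_specifier adds is not cosmetic: Url::join treats a path without a trailing slash as a file and replaces its last segment, so directory specifiers (the LSP root_uri, or a config file's parent as returned by specifier_parent) need to end in "/" before relative include/exclude entries are joined onto them. A small sketch of that behaviour, assuming only the url crate:

use url::Url;

fn main() {
  // Without a trailing slash, join() replaces the last path segment.
  let no_slash = Url::parse("file:///project/src").unwrap();
  assert_eq!(
    no_slash.join("ignored.ts").unwrap().as_str(),
    "file:///project/ignored.ts"
  );

  // With the trailing slash, join() resolves inside the directory.
  let with_slash = Url::parse("file:///project/src/").unwrap();
  assert_eq!(
    with_slash.join("ignored.ts").unwrap().as_str(),
    "file:///project/src/ignored.ts"
  );
}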

View file

@@ -2,13 +2,11 @@
 
 use super::analysis;
 use super::documents;
-use super::documents::Document;
 use super::documents::Documents;
 use super::language_server;
 use super::tsc;
 
 use crate::diagnostics;
-use crate::fs_util::specifier_to_file_path;
 
 use deno_core::anyhow::anyhow;
 use deno_core::error::AnyError;
@@ -302,59 +300,14 @@ fn ts_json_to_diagnostics(
     .collect()
 }
 
-// Filters documents according to the `include` and the `exclude` lists (from `StateSnapshot::maybe_lint_config`).
-// If a document is in the `exclude` list - then it be removed.
-// If the `include` list is not empty, and a document is not in - then it be removed too.
-fn filter_lint_documents(
-  snapshot: &language_server::StateSnapshot,
-  documents: &mut Vec<Document>,
-) {
-  let lint_config = match &snapshot.maybe_lint_config {
-    Some(config) => config,
-    None => return,
-  };
-
-  documents.retain(|doc| {
-    let path = if let Ok(file_path) = specifier_to_file_path(doc.specifier()) {
-      file_path
-    } else {
-      return false;
-    };
-
-    // Skip files which is in the exclude list.
-    if lint_config
-      .files
-      .exclude
-      .iter()
-      .any(|i| path.starts_with(i))
-    {
-      return false;
-    }
-
-    // Early return if the include list is empty.
-    if lint_config.files.include.is_empty() {
-      return true;
-    }
-
-    // Ignore files not in the include list.
-    lint_config
-      .files
-      .include
-      .iter()
-      .any(|i| path.starts_with(i))
-  });
-}
-
 async fn generate_lint_diagnostics(
   snapshot: &language_server::StateSnapshot,
   collection: Arc<Mutex<DiagnosticCollection>>,
 ) -> Result<DiagnosticVec, AnyError> {
-  let mut documents = snapshot.documents.documents(true, true);
+  let documents = snapshot.documents.documents(true, true);
   let workspace_settings = snapshot.config.settings.workspace.clone();
   let maybe_lint_config = snapshot.maybe_lint_config.clone();
-  filter_lint_documents(snapshot, &mut documents);
-
   tokio::task::spawn(async move {
     let mut diagnostics_vec = Vec::new();
     if workspace_settings.lint {
@@ -365,25 +318,35 @@ async fn generate_lint_diagnostics(
           .await
          .get_version(document.specifier(), &DiagnosticSource::DenoLint);
         if version != current_version {
-          let diagnostics = match document.maybe_parsed_source() {
-            Some(Ok(parsed_source)) => {
-              if let Ok(references) = analysis::get_lint_references(
-                &parsed_source,
-                maybe_lint_config.as_ref(),
-              ) {
-                references
-                  .into_iter()
-                  .map(|r| r.to_diagnostic())
-                  .collect::<Vec<_>>()
-              } else {
-                Vec::new()
-              }
-            }
-            Some(Err(_)) => Vec::new(),
-            None => {
-              error!("Missing file contents for: {}", document.specifier());
-              Vec::new()
-            }
-          };
+          let is_allowed = match &maybe_lint_config {
+            Some(lint_config) => {
+              lint_config.files.matches_specifier(document.specifier())
+            }
+            None => true,
+          };
+          let diagnostics = if is_allowed {
+            match document.maybe_parsed_source() {
+              Some(Ok(parsed_source)) => {
+                if let Ok(references) = analysis::get_lint_references(
+                  &parsed_source,
+                  maybe_lint_config.as_ref(),
+                ) {
+                  references
+                    .into_iter()
+                    .map(|r| r.to_diagnostic())
+                    .collect::<Vec<_>>()
+                } else {
+                  Vec::new()
+                }
+              }
+              Some(Err(_)) => Vec::new(),
+              None => {
+                error!("Missing file contents for: {}", document.specifier());
+                Vec::new()
+              }
+            }
+          } else {
+            Vec::new()
+          };
           diagnostics_vec.push((
             document.specifier().clone(),
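
One practical difference of replacing the up-front filter_lint_documents pass with the per-document is_allowed check: excluded documents are no longer dropped from the loop, they simply contribute an empty diagnostics list, which still gets pushed (and eventually published), replacing any lint diagnostics previously reported for that file. A rough, hypothetical sketch of the gate's shape, not the actual LSP types:

// Illustration only: stand-in config type with a matches_specifier() check.
struct FilesConfig;
impl FilesConfig {
  fn matches_specifier(&self, _specifier: &str) -> bool {
    false // pretend this file is excluded
  }
}

fn lint_document(
  specifier: &str,
  maybe_config: Option<&FilesConfig>,
) -> (String, Vec<String>) {
  let is_allowed = match maybe_config {
    Some(config) => config.matches_specifier(specifier),
    None => true, // no config file means everything is linted
  };
  let diagnostics = if is_allowed {
    vec![format!("lint finding in {}", specifier)] // stand-in for real output
  } else {
    Vec::new() // excluded: publish an empty list rather than skipping the file
  };
  (specifier.to_string(), diagnostics)
}

fn main() {
  println!("{:?}", lint_document("file:///p/ignored.ts", Some(&FilesConfig)));
}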

View file

@@ -60,7 +60,6 @@ use crate::config_file::TsConfig;
 use crate::deno_dir;
 use crate::file_fetcher::get_source_from_data_url;
 use crate::fs_util;
-use crate::fs_util::specifier_to_file_path;
 use crate::logger;
 use crate::tools::fmt::format_file;
 use crate::tools::fmt::format_parsed_source;
@@ -304,9 +303,7 @@ impl Inner {
     let config_url = if let Ok(url) = Url::from_file_path(config_str) {
       Ok(url)
     } else if let Some(root_uri) = maybe_root_uri {
-      let root_path = specifier_to_file_path(&root_uri)?;
-      let config_path = root_path.join(config_str);
-      Url::from_file_path(config_path).map_err(|_| {
+      root_uri.join(config_str).map_err(|_| {
         anyhow!("Bad file path for configuration file: \"{}\"", config_str)
       })
     } else {
@@ -317,13 +314,7 @@ impl Inner {
     }?;
 
     info!("  Resolved configuration file: \"{}\"", config_url);
-    let config_file = {
-      let buffer = specifier_to_file_path(&config_url)?;
-      let path = buffer
-        .to_str()
-        .ok_or_else(|| anyhow!("Bad uri: \"{}\"", config_url))?;
-      ConfigFile::read(path)?
-    };
+    let config_file = ConfigFile::from_specifier(&config_url)?;
     return Ok(Some((config_file, config_url)));
   }
 }
@@ -401,7 +392,7 @@ impl Inner {
     let cache_url = if let Ok(url) = Url::from_file_path(cache_str) {
       Ok(url)
     } else if let Some(root_uri) = &maybe_root_uri {
-      let root_path = specifier_to_file_path(root_uri)?;
+      let root_path = fs_util::specifier_to_file_path(root_uri)?;
       let cache_path = root_path.join(cache_str);
       Url::from_file_path(cache_path).map_err(|_| {
         anyhow!("Bad file path for import path: {:?}", cache_str)
@@ -412,7 +403,7 @@ impl Inner {
         cache_str
       ))
     }?;
-    let cache_path = specifier_to_file_path(&cache_url)?;
+    let cache_path = fs_util::specifier_to_file_path(&cache_url)?;
     info!(
       "  Resolved cache path: \"{}\"",
       cache_path.to_string_lossy()
@@ -457,7 +448,7 @@ impl Inner {
         anyhow!("Bad data url for import map: {:?}", import_map_str)
       })
     } else if let Some(root_uri) = &maybe_root_uri {
-      let root_path = specifier_to_file_path(root_uri)?;
+      let root_path = fs_util::specifier_to_file_path(root_uri)?;
       let import_map_path = root_path.join(import_map_str);
       Url::from_file_path(import_map_path).map_err(|_| {
         anyhow!("Bad file path for import map: {:?}", import_map_str)
@@ -472,7 +463,7 @@ impl Inner {
     let import_map_json = if import_map_url.scheme() == "data" {
       get_source_from_data_url(&import_map_url)?.0
     } else {
-      let import_map_path = specifier_to_file_path(&import_map_url)?;
+      let import_map_path = fs_util::specifier_to_file_path(&import_map_url)?;
       info!(
         "  Resolved import map: \"{}\"",
         import_map_path.to_string_lossy()
@@ -660,7 +651,12 @@ impl Inner {
     {
       let config = &mut self.config;
-      config.root_uri = params.root_uri;
+      // sometimes this root uri may not have a trailing slash, so force it to
+      config.root_uri = params
+        .root_uri
+        .map(|s| self.url_map.normalize_url(&s))
+        .map(fs_util::ensure_directory_specifier);
+
       if let Some(value) = params.initialization_options {
         config.set_workspace_settings(value).map_err(|err| {
           error!("Cannot set workspace settings: {}", err);
@@ -1019,12 +1015,16 @@ impl Inner {
     };
     let mark = self.performance.mark("formatting", Some(&params));
     let file_path =
-      specifier_to_file_path(&params.text_document.uri).map_err(|err| {
+      fs_util::specifier_to_file_path(&specifier).map_err(|err| {
        error!("{}", err);
        LspError::invalid_request()
      })?;
 
     let fmt_options = if let Some(fmt_config) = self.maybe_fmt_config.as_ref() {
+      // skip formatting any files ignored by the config file
+      if !fmt_config.files.matches_specifier(&specifier) {
+        return Ok(None);
+      }
       fmt_config.options.clone()
     } else {
       Default::default()
@@ -1907,7 +1907,7 @@ impl Inner {
       .config
       .root_uri
       .as_ref()
-      .and_then(|uri| specifier_to_file_path(uri).ok());
+      .and_then(|uri| fs_util::specifier_to_file_path(uri).ok());
     let mut resolved_items = Vec::<CallHierarchyIncomingCall>::new();
     for item in incoming_calls.iter() {
       if let Some(resolved) = item
@@ -1956,7 +1956,7 @@ impl Inner {
       .config
       .root_uri
       .as_ref()
-      .and_then(|uri| specifier_to_file_path(uri).ok());
+      .and_then(|uri| fs_util::specifier_to_file_path(uri).ok());
     let mut resolved_items = Vec::<CallHierarchyOutgoingCall>::new();
     for item in outgoing_calls.iter() {
       if let Some(resolved) = item
@@ -2012,7 +2012,7 @@ impl Inner {
       .config
      .root_uri
      .as_ref()
-      .and_then(|uri| specifier_to_file_path(uri).ok());
+      .and_then(|uri| fs_util::specifier_to_file_path(uri).ok());
     let mut resolved_items = Vec::<CallHierarchyItem>::new();
     match one_or_many {
       tsc::OneOrMany::One(item) => {

View file

@@ -217,7 +217,7 @@ pub fn create_main_worker(
     }
   } else if let Some(config_file) = &ps.maybe_config_file {
     // otherwise we will use the path to the config file
-    config_file.path.to_str().map(|s| s.to_string())
+    Some(config_file.specifier.to_string())
   } else {
     // otherwise we will use the path to the main module
     Some(main_module.to_string())
@@ -724,10 +724,8 @@ async fn create_graph_and_maybe_check(
   if let Some(ignored_options) = maybe_ignored_options {
     eprintln!("{}", ignored_options);
   }
-  let maybe_config_specifier = ps
-    .maybe_config_file
-    .as_ref()
-    .map(|cf| ModuleSpecifier::from_file_path(&cf.path).unwrap());
+  let maybe_config_specifier =
+    ps.maybe_config_file.as_ref().map(|cf| cf.specifier.clone());
   let check_result = emit::check_and_maybe_emit(
     graph.clone(),
     &mut cache,

View file

@@ -500,7 +500,7 @@ impl ProcState {
       let maybe_config_specifier = self
         .maybe_config_file
         .as_ref()
-        .map(|cf| ModuleSpecifier::from_file_path(&cf.path).unwrap());
+        .map(|cf| cf.specifier.clone());
       let options = emit::CheckOptions {
         debug: self.flags.log_level == Some(log::Level::Debug),
         emit_with_diagnostics: false,

View file

@@ -1,5 +1,6 @@
 // Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
 
+use deno_ast::ModuleSpecifier;
 use deno_core::serde::Deserialize;
 use deno_core::serde::Serialize;
 use deno_core::serde_json;
@@ -1420,6 +1421,61 @@ fn lsp_format_mbc() {
   shutdown(&mut client);
 }
 
+#[test]
+fn lsp_format_exclude_with_config() {
+  let temp_dir = TempDir::new().unwrap();
+  let mut params: lsp::InitializeParams =
+    serde_json::from_value(load_fixture("initialize_params.json")).unwrap();
+  let deno_fmt_jsonc =
+    serde_json::to_vec_pretty(&load_fixture("deno.fmt.exclude.jsonc")).unwrap();
+  fs::write(temp_dir.path().join("deno.fmt.jsonc"), deno_fmt_jsonc).unwrap();
+
+  params.root_uri = Some(Url::from_file_path(temp_dir.path()).unwrap());
+  if let Some(Value::Object(mut map)) = params.initialization_options {
+    map.insert("config".to_string(), json!("./deno.fmt.jsonc"));
+    params.initialization_options = Some(Value::Object(map));
+  }
+
+  let deno_exe = deno_exe_path();
+  let mut client = LspClient::new(&deno_exe).unwrap();
+  client
+    .write_request::<_, _, Value>("initialize", params)
+    .unwrap();
+
+  let file_uri =
+    ModuleSpecifier::from_file_path(temp_dir.path().join("ignored.ts"))
+      .unwrap()
+      .to_string();
+  did_open(
+    &mut client,
+    json!({
+      "textDocument": {
+        "uri": file_uri,
+        "languageId": "typescript",
+        "version": 1,
+        "text": "function myFunc(){}"
+      }
+    }),
+  );
+  let (maybe_res, maybe_err) = client
+    .write_request(
+      "textDocument/formatting",
+      json!({
+        "textDocument": {
+          "uri": file_uri
+        },
+        "options": {
+          "tabSize": 2,
+          "insertSpaces": true
+        }
+      }),
+    )
+    .unwrap();
+  assert!(maybe_err.is_none());
+  assert_eq!(maybe_res, Some(json!(null)));
+  shutdown(&mut client);
+}
+
 #[test]
 fn lsp_large_doc_changes() {
   let mut client = init("initialize_params.json");
@@ -4047,6 +4103,47 @@ fn lsp_lint_with_config() {
   shutdown(&mut client);
 }
 
+#[test]
+fn lsp_lint_exclude_with_config() {
+  let temp_dir = TempDir::new().unwrap();
+  let mut params: lsp::InitializeParams =
+    serde_json::from_value(load_fixture("initialize_params.json")).unwrap();
+  let deno_lint_jsonc =
+    serde_json::to_vec_pretty(&load_fixture("deno.lint.exclude.jsonc"))
+      .unwrap();
+  fs::write(temp_dir.path().join("deno.lint.jsonc"), deno_lint_jsonc).unwrap();
+
+  params.root_uri = Some(Url::from_file_path(temp_dir.path()).unwrap());
+  if let Some(Value::Object(mut map)) = params.initialization_options {
+    map.insert("config".to_string(), json!("./deno.lint.jsonc"));
+    params.initialization_options = Some(Value::Object(map));
+  }
+
+  let deno_exe = deno_exe_path();
+  let mut client = LspClient::new(&deno_exe).unwrap();
+  client
+    .write_request::<_, _, Value>("initialize", params)
+    .unwrap();
+
+  let diagnostics = did_open(
+    &mut client,
+    json!({
+      "textDocument": {
+        "uri": ModuleSpecifier::from_file_path(temp_dir.path().join("ignored.ts")).unwrap().to_string(),
+        "languageId": "typescript",
+        "version": 1,
+        "text": "// TODO: fixme\nexport async function non_camel_case() {\nconsole.log(\"finished!\")\n}"
+      }
+    }),
+  );
+  let diagnostics = diagnostics
+    .into_iter()
+    .flat_map(|x| x.diagnostics)
+    .collect::<Vec<_>>();
+  assert_eq!(diagnostics, Vec::new());
+  shutdown(&mut client);
+}
+
 #[test]
 fn lsp_jsx_import_source_pragma() {
   let _g = http_server();

View file

@@ -0,0 +1,16 @@
+{
+  "fmt": {
+    "files": {
+      "exclude": [
+        "ignored.ts"
+      ]
+    },
+    "options": {
+      "useTabs": true,
+      "lineWidth": 40,
+      "indentWidth": 8,
+      "singleQuote": true,
+      "proseWrap": "always"
+    }
+  }
+}

View file

@@ -0,0 +1,18 @@
+{
+  "lint": {
+    "files": {
+      "exclude": [
+        "ignored.ts"
+      ]
+    },
+    "rules": {
+      "exclude": [
+        "camelcase"
+      ],
+      "include": [
+        "ban-untagged-todo"
+      ],
+      "tags": []
+    }
+  }
+}

View file

@@ -15,6 +15,7 @@ use crate::diff::diff;
 use crate::file_watcher;
 use crate::file_watcher::ResolutionResult;
 use crate::flags::FmtFlags;
+use crate::fs_util::specifier_to_file_path;
 use crate::fs_util::{collect_files, get_extension, is_supported_ext_fmt};
 use crate::text_encoding;
 use deno_ast::ParsedSource;
@@ -55,11 +56,21 @@ pub async fn format(
   if let Some(fmt_config) = maybe_fmt_config.as_ref() {
     if include_files.is_empty() {
-      include_files = fmt_config.files.include.clone();
+      include_files = fmt_config
+        .files
+        .include
+        .iter()
+        .filter_map(|s| specifier_to_file_path(s).ok())
+        .collect::<Vec<_>>();
     }
     if exclude_files.is_empty() {
-      exclude_files = fmt_config.files.exclude.clone();
+      exclude_files = fmt_config
+        .files
+        .exclude
+        .iter()
+        .filter_map(|s| specifier_to_file_path(s).ok())
+        .collect::<Vec<_>>();
     }
   }

View file

@@ -10,7 +10,7 @@ use crate::config_file::LintConfig;
 use crate::file_watcher::ResolutionResult;
 use crate::flags::LintFlags;
 use crate::fmt_errors;
-use crate::fs_util::{collect_files, is_supported_ext};
+use crate::fs_util::{collect_files, is_supported_ext, specifier_to_file_path};
 use crate::tools::fmt::run_parallelized;
 use crate::{colors, file_watcher};
 use deno_ast::MediaType;
@@ -71,11 +71,21 @@ pub async fn lint(
   if let Some(lint_config) = maybe_lint_config.as_ref() {
     if include_files.is_empty() {
-      include_files = lint_config.files.include.clone();
+      include_files = lint_config
+        .files
+        .include
+        .iter()
+        .filter_map(|s| specifier_to_file_path(s).ok())
+        .collect::<Vec<_>>();
     }
     if exclude_files.is_empty() {
-      exclude_files = lint_config.files.exclude.clone();
+      exclude_files = lint_config
+        .files
+        .exclude
+        .iter()
+        .filter_map(|s| specifier_to_file_path(s).ok())
+        .collect::<Vec<_>>();
     }
   }
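
On the CLI side (the deno fmt and deno lint changes above), the file collector still works with PathBufs, so the URL-based include/exclude entries from the config are converted back with specifier_to_file_path, and anything that cannot be represented as a local path is silently dropped via the .ok() in filter_map. A rough standalone equivalent of that conversion, assuming only the url crate (the real helper adds friendlier error messages around to_file_path):

use std::path::PathBuf;
use url::Url;

// Illustration only: convert config specifiers back to paths for file
// collection, dropping anything that is not a local file: URL.
fn to_paths(specifiers: &[Url]) -> Vec<PathBuf> {
  specifiers
    .iter()
    .filter_map(|s| s.to_file_path().ok())
    .collect()
}

fn main() {
  let specifiers = vec![
    Url::parse("file:///project/src/").unwrap(),
    Url::parse("https://deno.land/std/version.ts").unwrap(), // dropped
  ];
  // On a Unix-style filesystem this prints ["/project/src/"].
  println!("{:?}", to_paths(&specifiers));
}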