// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

use super::cache::calculate_fs_version;
use super::text::LineIndex;
use super::tsc;
use super::tsc::AssetDocument;

use crate::args::package_json;
use crate::args::package_json::PackageJsonDeps;
use crate::args::ConfigFile;
use crate::args::JsxImportSourceConfig;
use crate::cache::CachedUrlMetadata;
use crate::cache::FastInsecureHasher;
use crate::cache::HttpCache;
use crate::file_fetcher::get_source_from_bytes;
use crate::file_fetcher::map_content_type;
use crate::file_fetcher::SUPPORTED_SCHEMES;
use crate::lsp::logging::lsp_warn;
use crate::npm::CliNpmRegistryApi;
use crate::npm::NpmResolution;
use crate::npm::PackageJsonDepsInstaller;
use crate::resolver::CliGraphResolver;
use crate::util::path::specifier_to_file_path;
use crate::util::text_encoding;

use deno_ast::MediaType;
use deno_ast::ParsedSource;
use deno_ast::SourceTextInfo;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::futures::future;
use deno_core::parking_lot::Mutex;
use deno_core::url;
use deno_core::ModuleSpecifier;
use deno_graph::GraphImport;
use deno_graph::Resolution;
use deno_runtime::deno_node;
use deno_runtime::deno_node::NodeResolution;
use deno_runtime::deno_node::NodeResolutionMode;
use deno_runtime::deno_node::NodeResolver;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageReq;
use deno_semver::npm::NpmPackageReqReference;
use indexmap::IndexMap;
use lsp::Url;
use once_cell::sync::Lazy;
use package_json::PackageJsonDepsProvider;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::fs;
use std::fs::ReadDir;
use std::ops::Range;
use std::path::Path;
use std::path::PathBuf;
use std::str::FromStr;
use std::sync::Arc;
use tower_lsp::lsp_types as lsp;
static JS_HEADERS: Lazy<HashMap<String, String>> = Lazy::new(|| {
  ([(
    "content-type".to_string(),
    "application/javascript".to_string(),
  )])
  .into_iter()
  .collect()
});

static JSX_HEADERS: Lazy<HashMap<String, String>> = Lazy::new(|| {
  ([("content-type".to_string(), "text/jsx".to_string())])
    .into_iter()
    .collect()
});

static TS_HEADERS: Lazy<HashMap<String, String>> = Lazy::new(|| {
  ([(
    "content-type".to_string(),
    "application/typescript".to_string(),
  )])
  .into_iter()
  .collect()
});

static TSX_HEADERS: Lazy<HashMap<String, String>> = Lazy::new(|| {
  ([("content-type".to_string(), "text/tsx".to_string())])
    .into_iter()
    .collect()
});
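
// A minimal illustrative sketch (not part of the original file): the header
// maps above stand in for HTTP response headers so that documents opened in
// the editor are analyzed with the same content type as fetched modules.
#[cfg(test)]
mod header_statics_example_tests {
  use super::JS_HEADERS;
  use super::TSX_HEADERS;

  #[test]
  fn headers_carry_the_expected_content_types() {
    assert_eq!(
      JS_HEADERS.get("content-type").map(String::as_str),
      Some("application/javascript")
    );
    assert_eq!(
      TSX_HEADERS.get("content-type").map(String::as_str),
      Some("text/tsx")
    );
  }
}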
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LanguageId {
  JavaScript,
  Jsx,
  TypeScript,
  Tsx,
  Json,
  JsonC,
  Markdown,
  Unknown,
}

impl LanguageId {
  pub fn as_media_type(&self) -> MediaType {
    match self {
      LanguageId::JavaScript => MediaType::JavaScript,
      LanguageId::Jsx => MediaType::Jsx,
      LanguageId::TypeScript => MediaType::TypeScript,
      LanguageId::Tsx => MediaType::Tsx,
      LanguageId::Json => MediaType::Json,
      LanguageId::JsonC => MediaType::Json,
      LanguageId::Markdown | LanguageId::Unknown => MediaType::Unknown,
    }
  }

  pub fn as_extension(&self) -> Option<&'static str> {
    match self {
      LanguageId::JavaScript => Some("js"),
      LanguageId::Jsx => Some("jsx"),
      LanguageId::TypeScript => Some("ts"),
      LanguageId::Tsx => Some("tsx"),
      LanguageId::Json => Some("json"),
      LanguageId::JsonC => Some("jsonc"),
      LanguageId::Markdown => Some("md"),
      LanguageId::Unknown => None,
    }
  }

  fn as_headers(&self) -> Option<&HashMap<String, String>> {
    match self {
      Self::JavaScript => Some(&JS_HEADERS),
      Self::Jsx => Some(&JSX_HEADERS),
      Self::TypeScript => Some(&TS_HEADERS),
      Self::Tsx => Some(&TSX_HEADERS),
      _ => None,
    }
  }

  fn is_diagnosable(&self) -> bool {
    matches!(
      self,
      Self::JavaScript | Self::Jsx | Self::TypeScript | Self::Tsx
    )
  }
}

impl FromStr for LanguageId {
  type Err = AnyError;

  fn from_str(s: &str) -> Result<Self, Self::Err> {
    match s {
      "javascript" => Ok(Self::JavaScript),
      "javascriptreact" | "jsx" => Ok(Self::Jsx),
      "typescript" => Ok(Self::TypeScript),
      "typescriptreact" | "tsx" => Ok(Self::Tsx),
      "json" => Ok(Self::Json),
      "jsonc" => Ok(Self::JsonC),
      "markdown" => Ok(Self::Markdown),
      _ => Ok(Self::Unknown),
    }
  }
}
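
// A minimal illustrative sketch (not part of the original file): these
// assertions simply restate the mappings declared in the `LanguageId` impls
// above, showing how LSP language identifiers map onto media types.
#[cfg(test)]
mod language_id_example_tests {
  use super::LanguageId;
  use deno_ast::MediaType;
  use std::str::FromStr;

  #[test]
  fn maps_lsp_language_identifiers() {
    assert_eq!(
      LanguageId::from_str("typescriptreact").unwrap(),
      LanguageId::Tsx
    );
    assert_eq!(LanguageId::from_str("jsonc").unwrap(), LanguageId::JsonC);
    // unrecognized identifiers fall back to `Unknown` rather than erroring
    assert_eq!(LanguageId::from_str("rust").unwrap(), LanguageId::Unknown);
    assert_eq!(LanguageId::Tsx.as_media_type(), MediaType::Tsx);
    assert_eq!(LanguageId::Markdown.as_extension(), Some("md"));
    assert!(!LanguageId::Json.is_diagnosable());
  }
}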
#[derive(Debug, PartialEq, Eq)]
enum IndexValid {
  All,
  UpTo(u32),
}

impl IndexValid {
  fn covers(&self, line: u32) -> bool {
    match *self {
      IndexValid::UpTo(to) => to > line,
      IndexValid::All => true,
    }
  }
}
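
// A minimal illustrative sketch (not part of the original file) of the
// `IndexValid::covers` contract used by `Document::with_change` below:
// `UpTo(n)` means the cached line index is still valid for lines strictly
// below `n`, while `All` covers every line.
#[cfg(test)]
mod index_valid_example_tests {
  use super::IndexValid;

  #[test]
  fn up_to_covers_lines_strictly_below() {
    assert!(IndexValid::All.covers(123));
    assert!(IndexValid::UpTo(5).covers(4));
    assert!(!IndexValid::UpTo(5).covers(5));
  }
}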
#[derive(Debug, Clone)]
pub enum AssetOrDocument {
  Document(Document),
  Asset(AssetDocument),
}

impl AssetOrDocument {
  pub fn specifier(&self) -> &ModuleSpecifier {
    match self {
      AssetOrDocument::Asset(asset) => asset.specifier(),
      AssetOrDocument::Document(doc) => doc.specifier(),
    }
  }

  pub fn document(&self) -> Option<&Document> {
    match self {
      AssetOrDocument::Asset(_) => None,
      AssetOrDocument::Document(doc) => Some(doc),
    }
  }

  pub fn text(&self) -> Arc<str> {
    match self {
      AssetOrDocument::Asset(a) => a.text(),
      AssetOrDocument::Document(d) => d.0.text_info.text(),
    }
  }

  pub fn line_index(&self) -> Arc<LineIndex> {
    match self {
      AssetOrDocument::Asset(a) => a.line_index(),
      AssetOrDocument::Document(d) => d.line_index(),
    }
  }

  pub fn maybe_navigation_tree(&self) -> Option<Arc<tsc::NavigationTree>> {
    match self {
      AssetOrDocument::Asset(a) => a.maybe_navigation_tree(),
      AssetOrDocument::Document(d) => d.maybe_navigation_tree(),
    }
  }

  pub fn media_type(&self) -> MediaType {
    match self {
      AssetOrDocument::Asset(_) => MediaType::TypeScript, // assets are always TypeScript
      AssetOrDocument::Document(d) => d.media_type(),
    }
  }

  pub fn get_maybe_dependency(
    &self,
    position: &lsp::Position,
  ) -> Option<(String, deno_graph::Dependency, deno_graph::Range)> {
    self
      .document()
      .and_then(|d| d.get_maybe_dependency(position))
  }

  pub fn maybe_parsed_source(
    &self,
  ) -> Option<Result<deno_ast::ParsedSource, deno_ast::Diagnostic>> {
    self.document().and_then(|d| d.maybe_parsed_source())
  }

  pub fn document_lsp_version(&self) -> Option<i32> {
    self.document().and_then(|d| d.maybe_lsp_version())
  }

  pub fn is_open(&self) -> bool {
    self.document().map(|d| d.is_open()).unwrap_or(false)
  }
}

#[derive(Debug, Default)]
struct DocumentDependencies {
  deps: IndexMap<String, deno_graph::Dependency>,
  maybe_types_dependency: Option<deno_graph::TypesDependency>,
}

impl DocumentDependencies {
  pub fn from_maybe_module(maybe_module: &Option<ModuleResult>) -> Self {
    if let Some(Ok(module)) = &maybe_module {
      Self::from_module(module)
    } else {
      Self::default()
    }
  }

  pub fn from_module(module: &deno_graph::EsmModule) -> Self {
    Self {
      deps: module.dependencies.clone(),
      maybe_types_dependency: module.maybe_types_dependency.clone(),
    }
  }
}

type ModuleResult = Result<deno_graph::EsmModule, deno_graph::ModuleGraphError>;
type ParsedSourceResult = Result<ParsedSource, deno_ast::Diagnostic>;
#[derive(Debug)]
struct DocumentInner {
  /// Contains the last-known-good set of dependencies from parsing the module.
  dependencies: Arc<DocumentDependencies>,
  fs_version: String,
  line_index: Arc<LineIndex>,
  maybe_headers: Option<HashMap<String, String>>,
  maybe_language_id: Option<LanguageId>,
  maybe_lsp_version: Option<i32>,
  maybe_module: Option<ModuleResult>,
  // this is a lazily constructed value based on the state of the document,
  // so having a mutex to hold it is ok
  maybe_navigation_tree: Mutex<Option<Arc<tsc::NavigationTree>>>,
  maybe_parsed_source: Option<ParsedSourceResult>,
  specifier: ModuleSpecifier,
  text_info: SourceTextInfo,
}

#[derive(Debug, Clone)]
pub struct Document(Arc<DocumentInner>);
impl Document {
  fn new(
    specifier: ModuleSpecifier,
    fs_version: String,
    maybe_headers: Option<HashMap<String, String>>,
    text_info: SourceTextInfo,
    resolver: &dyn deno_graph::source::Resolver,
  ) -> Self {
    // we only ever do `Document::new` on on-disk resources that are supposed to
    // be diagnosable, unlike `Document::open`, so it is safe to unconditionally
    // parse the module.
    let (maybe_parsed_source, maybe_module) = parse_and_analyze_module(
      &specifier,
      text_info.clone(),
      maybe_headers.as_ref(),
      resolver,
    );
    let dependencies =
      Arc::new(DocumentDependencies::from_maybe_module(&maybe_module));
    let line_index = Arc::new(LineIndex::new(text_info.text_str()));
    Self(Arc::new(DocumentInner {
      dependencies,
      fs_version,
      line_index,
      maybe_headers,
      maybe_language_id: None,
      maybe_lsp_version: None,
      maybe_module,
      maybe_navigation_tree: Mutex::new(None),
      maybe_parsed_source,
      text_info,
      specifier,
    }))
  }

  fn maybe_with_new_resolver(
    &self,
    resolver: &dyn deno_graph::source::Resolver,
  ) -> Option<Self> {
    let parsed_source_result = match &self.0.maybe_parsed_source {
      Some(parsed_source_result) => parsed_source_result.clone(),
      None => return None, // nothing to change
    };
    let maybe_module = Some(analyze_module(
      &self.0.specifier,
      &parsed_source_result,
      self.0.maybe_headers.as_ref(),
      resolver,
    ));
    let dependencies =
      Arc::new(DocumentDependencies::from_maybe_module(&maybe_module));
    Some(Self(Arc::new(DocumentInner {
      // updated properties
      dependencies,
      maybe_module,
      maybe_navigation_tree: Mutex::new(None),
      maybe_parsed_source: Some(parsed_source_result),
      // maintain - this should all be copies/clones
      fs_version: self.0.fs_version.clone(),
      line_index: self.0.line_index.clone(),
      maybe_headers: self.0.maybe_headers.clone(),
      maybe_language_id: self.0.maybe_language_id,
      maybe_lsp_version: self.0.maybe_lsp_version,
      text_info: self.0.text_info.clone(),
      specifier: self.0.specifier.clone(),
    })))
  }
  fn open(
    specifier: ModuleSpecifier,
    version: i32,
    language_id: LanguageId,
    content: Arc<str>,
    resolver: &dyn deno_graph::source::Resolver,
  ) -> Self {
    let maybe_headers = language_id.as_headers();
    let text_info = SourceTextInfo::new(content);
    let (maybe_parsed_source, maybe_module) = if language_id.is_diagnosable() {
      parse_and_analyze_module(
        &specifier,
        text_info.clone(),
        maybe_headers,
        resolver,
      )
    } else {
      (None, None)
    };
    let dependencies =
      Arc::new(DocumentDependencies::from_maybe_module(&maybe_module));
    let line_index = Arc::new(LineIndex::new(text_info.text_str()));
    Self(Arc::new(DocumentInner {
      dependencies,
      fs_version: "1".to_string(),
      line_index,
      maybe_language_id: Some(language_id),
      maybe_lsp_version: Some(version),
      maybe_headers: maybe_headers.map(ToOwned::to_owned),
      maybe_module,
      maybe_navigation_tree: Mutex::new(None),
      maybe_parsed_source,
      text_info,
      specifier,
    }))
  }

  fn with_change(
    &self,
    version: i32,
    changes: Vec<lsp::TextDocumentContentChangeEvent>,
    resolver: &dyn deno_graph::source::Resolver,
  ) -> Result<Document, AnyError> {
    let mut content = self.0.text_info.text_str().to_string();
    let mut line_index = self.0.line_index.clone();
    let mut index_valid = IndexValid::All;
    for change in changes {
      if let Some(range) = change.range {
        if !index_valid.covers(range.start.line) {
          line_index = Arc::new(LineIndex::new(&content));
        }
        index_valid = IndexValid::UpTo(range.start.line);
        let range = line_index.get_text_range(range)?;
        content.replace_range(Range::<usize>::from(range), &change.text);
      } else {
        content = change.text;
        index_valid = IndexValid::UpTo(0);
      }
    }
    let text_info = SourceTextInfo::from_string(content);
    let (maybe_parsed_source, maybe_module) = if self
      .0
      .maybe_language_id
      .as_ref()
      .map(|li| li.is_diagnosable())
      .unwrap_or(false)
    {
      let maybe_headers = self
        .0
        .maybe_language_id
        .as_ref()
        .and_then(|li| li.as_headers());
      parse_and_analyze_module(
        &self.0.specifier,
        text_info.clone(),
        maybe_headers,
        resolver,
      )
    } else {
      (None, None)
    };
    let dependencies = if let Some(Ok(module)) = &maybe_module {
      Arc::new(DocumentDependencies::from_module(module))
    } else {
      self.0.dependencies.clone() // use the last known good
    };
    let line_index = if index_valid == IndexValid::All {
      line_index
    } else {
      Arc::new(LineIndex::new(text_info.text_str()))
    };
    Ok(Document(Arc::new(DocumentInner {
      specifier: self.0.specifier.clone(),
      fs_version: self.0.fs_version.clone(),
      maybe_language_id: self.0.maybe_language_id,
      dependencies,
      text_info,
      line_index,
      maybe_headers: self.0.maybe_headers.clone(),
      maybe_module,
      maybe_parsed_source,
      maybe_lsp_version: Some(version),
      maybe_navigation_tree: Mutex::new(None),
    })))
  }
  pub fn specifier(&self) -> &ModuleSpecifier {
    &self.0.specifier
  }

  pub fn content(&self) -> Arc<str> {
    self.0.text_info.text()
  }

  pub fn text_info(&self) -> SourceTextInfo {
    self.0.text_info.clone()
  }

  pub fn line_index(&self) -> Arc<LineIndex> {
    self.0.line_index.clone()
  }

  fn fs_version(&self) -> &str {
    self.0.fs_version.as_str()
  }

  pub fn script_version(&self) -> String {
    self
      .maybe_lsp_version()
      .map(|v| v.to_string())
      .unwrap_or_else(|| self.fs_version().to_string())
  }

  pub fn is_diagnosable(&self) -> bool {
    matches!(
      self.media_type(),
      MediaType::JavaScript
        | MediaType::Jsx
        | MediaType::Mjs
        | MediaType::Cjs
        | MediaType::TypeScript
        | MediaType::Tsx
        | MediaType::Mts
        | MediaType::Cts
        | MediaType::Dts
        | MediaType::Dmts
        | MediaType::Dcts
    )
  }

  pub fn is_open(&self) -> bool {
    self.0.maybe_lsp_version.is_some()
  }

  pub fn maybe_types_dependency(&self) -> Resolution {
    if let Some(types_dep) = self.0.dependencies.maybe_types_dependency.as_ref()
    {
      types_dep.dependency.clone()
    } else {
      Resolution::None
    }
  }

  pub fn media_type(&self) -> MediaType {
    if let Some(Ok(module)) = &self.0.maybe_module {
      return module.media_type;
    }
    let specifier_media_type = MediaType::from_specifier(&self.0.specifier);
    if specifier_media_type != MediaType::Unknown {
      return specifier_media_type;
    }

    self
      .0
      .maybe_language_id
      .map(|id| id.as_media_type())
      .unwrap_or(MediaType::Unknown)
  }

  pub fn maybe_language_id(&self) -> Option<LanguageId> {
    self.0.maybe_language_id
  }

  /// Returns the current language server client version if any.
  pub fn maybe_lsp_version(&self) -> Option<i32> {
    self.0.maybe_lsp_version
  }

  fn maybe_esm_module(&self) -> Option<&ModuleResult> {
    self.0.maybe_module.as_ref()
  }

  pub fn maybe_parsed_source(
    &self,
  ) -> Option<Result<deno_ast::ParsedSource, deno_ast::Diagnostic>> {
    self.0.maybe_parsed_source.clone()
  }

  pub fn maybe_navigation_tree(&self) -> Option<Arc<tsc::NavigationTree>> {
    self.0.maybe_navigation_tree.lock().clone()
  }

  pub fn update_navigation_tree_if_version(
    &self,
    tree: Arc<tsc::NavigationTree>,
    script_version: &str,
  ) {
    // Ensure we are updating the same document that the navigation tree was
    // created for. Note: this should not be racy between the version check
    // and setting the navigation tree, because the document is immutable
    // and this is enforced by it being wrapped in an Arc.
    if self.script_version() == script_version {
      *self.0.maybe_navigation_tree.lock() = Some(tree);
    }
  }

  pub fn dependencies(&self) -> &IndexMap<String, deno_graph::Dependency> {
    &self.0.dependencies.deps
  }

  /// If the supplied position is within a dependency range, return the resolved
  /// string specifier for the dependency, the resolved dependency and the range
  /// in the source document of the specifier.
  pub fn get_maybe_dependency(
    &self,
    position: &lsp::Position,
  ) -> Option<(String, deno_graph::Dependency, deno_graph::Range)> {
    let module = self.maybe_esm_module()?.as_ref().ok()?;
    let position = deno_graph::Position {
      line: position.line as usize,
      character: position.character as usize,
    };
    module.dependencies.iter().find_map(|(s, dep)| {
      dep
        .includes(&position)
        .map(|r| (s.clone(), dep.clone(), r.clone()))
    })
  }
}
pub fn to_hover_text(result: &Resolution) -> String {
  match result {
    Resolution::Ok(resolved) => {
      let specifier = &resolved.specifier;
      match specifier.scheme() {
        "data" => "_(a data url)_".to_string(),
        "blob" => "_(a blob url)_".to_string(),
        _ => format!(
          "{}​{}",
          &specifier[..url::Position::AfterScheme],
          &specifier[url::Position::AfterScheme..],
        )
        .replace('@', "​@"),
      }
    }
    Resolution::Err(_) => "_[errored]_".to_string(),
    Resolution::None => "_[missing]_".to_string(),
  }
}

pub fn to_lsp_range(range: &deno_graph::Range) -> lsp::Range {
  lsp::Range {
    start: lsp::Position {
      line: range.start.line as u32,
      character: range.start.character as u32,
    },
    end: lsp::Position {
      line: range.end.line as u32,
      character: range.end.character as u32,
    },
  }
}

/// Recurse and collect specifiers that appear in the dependent map.
fn recurse_dependents(
  specifier: &ModuleSpecifier,
  map: &HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>>,
  dependents: &mut HashSet<ModuleSpecifier>,
) {
  if let Some(deps) = map.get(specifier) {
    for dep in deps {
      if !dependents.contains(dep) {
        dependents.insert(dep.clone());
        recurse_dependents(dep, map, dependents);
      }
    }
  }
}
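
// A minimal illustrative sketch (not part of the original file): builds a
// small dependents map by hand and checks that `recurse_dependents` collects
// dependents transitively, which is how diagnostics get invalidated for every
// module affected by a change.
#[cfg(test)]
mod recurse_dependents_example_tests {
  use super::recurse_dependents;
  use deno_core::ModuleSpecifier;
  use std::collections::HashMap;
  use std::collections::HashSet;

  #[test]
  fn collects_transitive_dependents() {
    let a = ModuleSpecifier::parse("file:///a.ts").unwrap();
    let b = ModuleSpecifier::parse("file:///b.ts").unwrap();
    let c = ModuleSpecifier::parse("file:///c.ts").unwrap();
    // `b` depends on `a`, and `c` depends on `b`
    let mut map = HashMap::new();
    map.insert(a.clone(), HashSet::from([b.clone()]));
    map.insert(b.clone(), HashSet::from([c.clone()]));
    let mut dependents = HashSet::new();
    recurse_dependents(&a, &map, &mut dependents);
    assert!(dependents.contains(&b));
    assert!(dependents.contains(&c));
    assert_eq!(dependents.len(), 2);
  }
}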
#[derive(Debug, Default)]
struct SpecifierResolver {
  cache: HttpCache,
  redirects: Mutex<HashMap<ModuleSpecifier, ModuleSpecifier>>,
}

impl SpecifierResolver {
  pub fn new(cache: HttpCache) -> Self {
    Self {
      cache,
      redirects: Mutex::new(HashMap::new()),
    }
  }

  pub fn resolve(
    &self,
    specifier: &ModuleSpecifier,
  ) -> Option<ModuleSpecifier> {
    let scheme = specifier.scheme();
    if !SUPPORTED_SCHEMES.contains(&scheme) {
      return None;
    }

    if scheme == "data" || scheme == "blob" || scheme == "file" {
      Some(specifier.clone())
    } else {
      let mut redirects = self.redirects.lock();
      if let Some(specifier) = redirects.get(specifier) {
        Some(specifier.clone())
      } else {
        let redirect = self.resolve_remote(specifier, 10)?;
        redirects.insert(specifier.clone(), redirect.clone());
        Some(redirect)
      }
    }
  }

  fn resolve_remote(
    &self,
    specifier: &ModuleSpecifier,
    redirect_limit: usize,
  ) -> Option<ModuleSpecifier> {
    let cache_filename = self.cache.get_cache_filename(specifier)?;
    if redirect_limit > 0 && cache_filename.is_file() {
      let headers = CachedUrlMetadata::read(&cache_filename)
        .ok()
        .map(|m| m.headers)?;
      if let Some(location) = headers.get("location") {
        let redirect =
          deno_core::resolve_import(location, specifier.as_str()).ok()?;
        self.resolve_remote(&redirect, redirect_limit - 1)
      } else {
        Some(specifier.clone())
      }
    } else {
      None
    }
  }
}
#[derive(Debug, Default)]
struct FileSystemDocuments {
  docs: HashMap<ModuleSpecifier, Document>,
  dirty: bool,
}

impl FileSystemDocuments {
  pub fn get(
    &mut self,
    cache: &HttpCache,
    resolver: &dyn deno_graph::source::Resolver,
    specifier: &ModuleSpecifier,
  ) -> Option<Document> {
    let fs_version = get_document_path(cache, specifier)
      .and_then(|path| calculate_fs_version(&path));
    let file_system_doc = self.docs.get(specifier);
    if file_system_doc.map(|d| d.fs_version().to_string()) != fs_version {
      // attempt to update the file on the file system
      self.refresh_document(cache, resolver, specifier)
    } else {
      file_system_doc.cloned()
    }
  }

  /// Adds or updates a document by reading the document from the file system,
  /// returning the document.
  fn refresh_document(
    &mut self,
    cache: &HttpCache,
    resolver: &dyn deno_graph::source::Resolver,
    specifier: &ModuleSpecifier,
  ) -> Option<Document> {
    let path = get_document_path(cache, specifier)?;
    let fs_version = calculate_fs_version(&path)?;
    let bytes = fs::read(path).ok()?;
    let doc = if specifier.scheme() == "file" {
      let maybe_charset =
        Some(text_encoding::detect_charset(&bytes).to_string());
      let content = get_source_from_bytes(bytes, maybe_charset).ok()?;
      Document::new(
        specifier.clone(),
        fs_version,
        None,
        SourceTextInfo::from_string(content),
        resolver,
      )
    } else {
      let cache_filename = cache.get_cache_filename(specifier)?;
      let specifier_metadata = CachedUrlMetadata::read(&cache_filename).ok()?;
      let maybe_content_type = specifier_metadata.headers.get("content-type");
      let (_, maybe_charset) = map_content_type(specifier, maybe_content_type);
      let maybe_headers = Some(specifier_metadata.headers);
      let content = get_source_from_bytes(bytes, maybe_charset).ok()?;
      Document::new(
        specifier.clone(),
        fs_version,
        maybe_headers,
        SourceTextInfo::from_string(content),
        resolver,
      )
    };
    self.dirty = true;
    self.docs.insert(specifier.clone(), doc.clone());
    Some(doc)
  }
}

fn get_document_path(
  cache: &HttpCache,
  specifier: &ModuleSpecifier,
) -> Option<PathBuf> {
  match specifier.scheme() {
    "npm" | "node" => None,
    "file" => specifier_to_file_path(specifier).ok(),
    _ => cache.get_cache_filename(specifier),
  }
}
pub struct UpdateDocumentConfigOptions<'a> {
  pub enabled_urls: Vec<Url>,
  pub document_preload_limit: usize,
  pub maybe_import_map: Option<Arc<import_map::ImportMap>>,
  pub maybe_config_file: Option<&'a ConfigFile>,
  pub maybe_package_json: Option<&'a PackageJson>,
  pub npm_registry_api: Arc<CliNpmRegistryApi>,
  pub npm_resolution: Arc<NpmResolution>,
}

/// Specify the documents to include on a `documents.documents(...)` call.
#[derive(Debug, Clone, Copy)]
pub enum DocumentsFilter {
  /// Includes all the documents (diagnosable & non-diagnosable, open & file system).
  All,
  /// Includes all the diagnosable documents (open & file system).
  AllDiagnosable,
  /// Includes only the diagnosable documents that are open.
  OpenDiagnosable,
}
#[derive(Debug, Clone, Default)]
pub struct Documents {
  /// The DENO_DIR that the documents look in for non-file based modules.
  cache: HttpCache,
  /// A flag that indicates that the stored data is potentially invalid and
  /// needs to be recalculated before being considered valid.
  dirty: bool,
  /// A map where the key is a specifier and the value is a set of specifiers
  /// that depend on the key.
  dependents_map: Arc<HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>>>,
  /// A map of documents that are "open" in the language server.
  open_docs: HashMap<ModuleSpecifier, Document>,
  /// Documents stored on the file system.
  file_system_docs: Arc<Mutex<FileSystemDocuments>>,
  /// Hash of the config used for resolution. When the hash changes we update
  /// dependencies.
  resolver_config_hash: u64,
  /// Any imports to the context supplied by configuration files. This is like
  /// the imports into a module graph in the CLI.
  imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>,
  /// A resolver that takes into account currently loaded import map and JSX
  /// settings.
  resolver: Arc<CliGraphResolver>,
  /// The npm package requirements found in npm specifiers.
  npm_specifier_reqs: Arc<Vec<NpmPackageReq>>,
  /// Gets if any document had a node: specifier such that a @types/node package
  /// should be injected.
  has_injected_types_node_package: bool,
  /// Resolves a specifier to its final, redirected specifier.
  specifier_resolver: Arc<SpecifierResolver>,
}
impl Documents {
  pub fn new(location: PathBuf) -> Self {
    let cache = HttpCache::new(location);
    Self {
      cache: cache.clone(),
      dirty: true,
      dependents_map: Default::default(),
      open_docs: HashMap::default(),
      file_system_docs: Default::default(),
      resolver_config_hash: 0,
      imports: Default::default(),
      resolver: Default::default(),
      npm_specifier_reqs: Default::default(),
      has_injected_types_node_package: false,
      specifier_resolver: Arc::new(SpecifierResolver::new(cache)),
    }
  }

  pub fn module_graph_imports(&self) -> impl Iterator<Item = &ModuleSpecifier> {
    self
      .imports
      .values()
      .flat_map(|i| i.dependencies.values())
      .flat_map(|value| value.get_type().or_else(|| value.get_code()))
  }

  /// "Open" a document from the perspective of the editor, meaning that
  /// requests for information from the document will come from the in-memory
  /// representation received from the language server client, versus reading
  /// information from the disk.
  pub fn open(
    &mut self,
    specifier: ModuleSpecifier,
    version: i32,
    language_id: LanguageId,
    content: Arc<str>,
  ) -> Document {
    let resolver = self.get_resolver();
    let document = Document::open(
      specifier.clone(),
      version,
      language_id,
      content,
      resolver,
    );
    let mut file_system_docs = self.file_system_docs.lock();
    file_system_docs.docs.remove(&specifier);
    file_system_docs.dirty = true;
    self.open_docs.insert(specifier, document.clone());
    self.dirty = true;
    document
  }

  /// Apply language server content changes to an open document.
  pub fn change(
    &mut self,
    specifier: &ModuleSpecifier,
    version: i32,
    changes: Vec<lsp::TextDocumentContentChangeEvent>,
  ) -> Result<Document, AnyError> {
    let doc = self
      .open_docs
      .get(specifier)
      .cloned()
      .or_else(|| {
        let mut file_system_docs = self.file_system_docs.lock();
        file_system_docs.docs.remove(specifier)
      })
      .map(Ok)
      .unwrap_or_else(|| {
        Err(custom_error(
          "NotFound",
          format!("The specifier \"{specifier}\" was not found."),
        ))
      })?;
    self.dirty = true;
    let doc = doc.with_change(version, changes, self.get_resolver())?;
    self.open_docs.insert(doc.specifier().clone(), doc.clone());
    Ok(doc)
  }

  /// Close an open document; this essentially clears any editor state that is
  /// being held, and the document store will revert to the file system if
  /// information about the document is required.
  pub fn close(&mut self, specifier: &ModuleSpecifier) -> Result<(), AnyError> {
    if self.open_docs.remove(specifier).is_some() {
      self.dirty = true;
    } else {
      let mut file_system_docs = self.file_system_docs.lock();
      if file_system_docs.docs.remove(specifier).is_some() {
        file_system_docs.dirty = true;
      } else {
        return Err(custom_error(
          "NotFound",
          format!("The specifier \"{specifier}\" was not found."),
        ));
      }
    }

    Ok(())
  }
  /// Return `true` if the provided specifier can be resolved to a document,
  /// otherwise `false`.
  pub fn contains_import(
    &self,
    specifier: &str,
    referrer: &ModuleSpecifier,
  ) -> bool {
    let maybe_specifier = self.get_resolver().resolve(specifier, referrer).ok();
    if let Some(import_specifier) = maybe_specifier {
      self.exists(&import_specifier)
    } else {
      false
    }
  }

  /// Return `true` if the specifier can be resolved to a document.
  pub fn exists(&self, specifier: &ModuleSpecifier) -> bool {
    let specifier = self.specifier_resolver.resolve(specifier);
    if let Some(specifier) = specifier {
      if self.open_docs.contains_key(&specifier) {
        return true;
      }
      if let Some(path) = get_document_path(&self.cache, &specifier) {
        return path.is_file();
      }
    }
    false
  }

  /// Return an array of specifiers, if any, that are dependent upon the
  /// supplied specifier. This is used to determine invalidation of diagnostics
  /// when a module has been changed.
  pub fn dependents(
    &mut self,
    specifier: &ModuleSpecifier,
  ) -> Vec<ModuleSpecifier> {
    self.calculate_dependents_if_dirty();
    let mut dependents = HashSet::new();
    if let Some(specifier) = self.specifier_resolver.resolve(specifier) {
      recurse_dependents(&specifier, &self.dependents_map, &mut dependents);
      dependents.into_iter().collect()
    } else {
      vec![]
    }
  }

  /// Returns a collection of npm package requirements.
  pub fn npm_package_reqs(&mut self) -> Arc<Vec<NpmPackageReq>> {
    self.calculate_dependents_if_dirty();
    self.npm_specifier_reqs.clone()
  }

  /// Returns if a @types/node package was injected into the npm
  /// resolver based on the state of the documents.
  pub fn has_injected_types_node_package(&self) -> bool {
    self.has_injected_types_node_package
  }

  /// Return a document for the specifier.
  pub fn get(&self, original_specifier: &ModuleSpecifier) -> Option<Document> {
    let specifier = self.specifier_resolver.resolve(original_specifier)?;
    if let Some(document) = self.open_docs.get(&specifier) {
      Some(document.clone())
    } else {
      let mut file_system_docs = self.file_system_docs.lock();
      file_system_docs.get(&self.cache, self.get_resolver(), &specifier)
    }
  }

  /// Return a collection of documents that are contained in the document store
  /// based on the provided filter.
  pub fn documents(&self, filter: DocumentsFilter) -> Vec<Document> {
    match filter {
      DocumentsFilter::OpenDiagnosable => self
        .open_docs
        .values()
        .filter_map(|doc| {
          if doc.is_diagnosable() {
            Some(doc.clone())
          } else {
            None
          }
        })
        .collect(),
      DocumentsFilter::AllDiagnosable | DocumentsFilter::All => {
        let diagnosable_only =
          matches!(filter, DocumentsFilter::AllDiagnosable);
        // it is technically possible for a Document to end up in both the open
        // and closed documents so we need to ensure we don't return duplicates
        let mut seen_documents = HashSet::new();
        let file_system_docs = self.file_system_docs.lock();
        self
          .open_docs
          .values()
          .chain(file_system_docs.docs.values())
          .filter_map(|doc| {
            // this prefers the open documents
            if seen_documents.insert(doc.specifier().clone())
              && (!diagnosable_only || doc.is_diagnosable())
            {
              Some(doc.clone())
            } else {
              None
            }
          })
          .collect()
      }
    }
  }
  /// For a given set of string specifiers, resolve each one from the graph,
  /// for a given referrer. This is used to provide resolution information to
  /// tsc when type checking.
  pub fn resolve(
    &self,
    specifiers: Vec<String>,
    referrer_doc: &AssetOrDocument,
    maybe_node_resolver: Option<&Arc<NodeResolver>>,
  ) -> Vec<Option<(ModuleSpecifier, MediaType)>> {
    let referrer = referrer_doc.specifier();
    let dependencies = match referrer_doc {
      AssetOrDocument::Asset(_) => None,
      AssetOrDocument::Document(doc) => Some(doc.0.dependencies.clone()),
    };
    let mut results = Vec::new();
    for specifier in specifiers {
      if let Some(node_resolver) = maybe_node_resolver {
        if node_resolver.in_npm_package(referrer) {
          // we're in an npm package, so use node resolution
          results.push(Some(NodeResolution::into_specifier_and_media_type(
            node_resolver
              .resolve(
                &specifier,
                referrer,
                NodeResolutionMode::Types,
                &PermissionsContainer::allow_all(),
              )
              .ok()
              .flatten(),
          )));
          continue;
        }
      }
      if let Some(module_name) = specifier.strip_prefix("node:") {
        if deno_node::is_builtin_node_module(module_name) {
          // return itself for node: specifiers because during type checking
          // we resolve to the ambient modules in the @types/node package
          // rather than deno_std/node
          results.push(Some((
            ModuleSpecifier::parse(&specifier).unwrap(),
            MediaType::Dts,
          )));
          continue;
        }
      }
      if specifier.starts_with("asset:") {
        if let Ok(specifier) = ModuleSpecifier::parse(&specifier) {
          let media_type = MediaType::from_specifier(&specifier);
          results.push(Some((specifier, media_type)));
        } else {
          results.push(None);
        }
      } else if let Some(dep) =
        dependencies.as_ref().and_then(|d| d.deps.get(&specifier))
      {
        if let Some(specifier) = dep.maybe_type.maybe_specifier() {
          results.push(self.resolve_dependency(specifier, maybe_node_resolver));
        } else if let Some(specifier) = dep.maybe_code.maybe_specifier() {
          results.push(self.resolve_dependency(specifier, maybe_node_resolver));
        } else {
          results.push(None);
        }
      } else if let Some(specifier) = self
        .resolve_imports_dependency(&specifier)
        .and_then(|r| r.maybe_specifier())
      {
        results.push(self.resolve_dependency(specifier, maybe_node_resolver));
      } else if let Ok(npm_req_ref) =
        NpmPackageReqReference::from_str(&specifier)
      {
        results
          .push(node_resolve_npm_req_ref(npm_req_ref, maybe_node_resolver));
      } else {
        results.push(None);
      }
    }
    results
  }

  /// Update the location of the on disk cache for the document store.
  pub fn set_location(&mut self, location: PathBuf) {
    // TODO update resolved dependencies?
    let cache = HttpCache::new(location);
    self.cache = cache.clone();
    self.specifier_resolver = Arc::new(SpecifierResolver::new(cache));
    self.dirty = true;
  }

  /// Tries to cache a navigation tree that is associated with the provided specifier
  /// if the document stored has the same script version.
  pub fn try_cache_navigation_tree(
    &self,
    specifier: &ModuleSpecifier,
    script_version: &str,
    navigation_tree: Arc<tsc::NavigationTree>,
  ) -> Result<(), AnyError> {
    if let Some(doc) = self.open_docs.get(specifier) {
      doc.update_navigation_tree_if_version(navigation_tree, script_version)
    } else {
      let mut file_system_docs = self.file_system_docs.lock();
      if let Some(doc) = file_system_docs.docs.get_mut(specifier) {
        doc.update_navigation_tree_if_version(navigation_tree, script_version);
      } else {
        return Err(custom_error(
          "NotFound",
          format!("Specifier not found {specifier}"),
        ));
      }
    }
    Ok(())
  }
2023-05-11 17:17:14 -04:00
|
|
|
pub fn update_config(&mut self, options: UpdateDocumentConfigOptions) {
|
2023-01-28 10:18:32 -05:00
|
|
|
fn calculate_resolver_config_hash(
|
2023-04-01 15:10:30 -04:00
|
|
|
enabled_urls: &[Url],
|
2023-05-11 21:53:09 -04:00
|
|
|
document_preload_limit: usize,
|
2023-01-28 10:18:32 -05:00
|
|
|
maybe_import_map: Option<&import_map::ImportMap>,
|
|
|
|
maybe_jsx_config: Option<&JsxImportSourceConfig>,
|
2023-03-03 17:27:05 -05:00
|
|
|
maybe_package_json_deps: Option<&PackageJsonDeps>,
|
2023-01-28 10:18:32 -05:00
|
|
|
) -> u64 {
|
|
|
|
let mut hasher = FastInsecureHasher::default();
|
2023-05-11 21:53:09 -04:00
|
|
|
hasher.write_hashable(&document_preload_limit);
|
2023-03-30 17:47:53 -04:00
|
|
|
hasher.write_hashable(&{
|
2023-04-01 15:10:30 -04:00
|
|
|
// ensure these are sorted so the hashing is deterministic
|
|
|
|
let mut enabled_urls = enabled_urls.to_vec();
|
|
|
|
enabled_urls.sort_unstable();
|
|
|
|
enabled_urls
|
2023-03-30 17:47:53 -04:00
|
|
|
});
|
2023-01-28 10:18:32 -05:00
|
|
|
if let Some(import_map) = maybe_import_map {
|
|
|
|
hasher.write_str(&import_map.to_json());
|
|
|
|
hasher.write_str(import_map.base_url().as_str());
|
|
|
|
}
|
2023-03-15 10:34:23 -04:00
|
|
|
hasher.write_hashable(&maybe_jsx_config);
|
2023-04-06 18:46:44 -04:00
|
|
|
if let Some(package_json_deps) = &maybe_package_json_deps {
|
|
|
|
// We need to ensure the hashing is deterministic so explicitly type
|
|
|
|
// this in order to catch if the type of package_json_deps ever changes
|
|
|
|
// from a sorted/deterministic BTreeMap to something else.
|
|
|
|
let package_json_deps: &BTreeMap<_, _> = *package_json_deps;
|
|
|
|
for (key, value) in package_json_deps {
|
|
|
|
hasher.write_hashable(key);
|
|
|
|
match value {
|
|
|
|
Ok(value) => {
|
|
|
|
hasher.write_hashable(value);
|
|
|
|
}
|
|
|
|
Err(err) => {
|
|
|
|
hasher.write_str(&err.to_string());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-01-28 10:18:32 -05:00
|
|
|
hasher.finish()
|
|
|
|
}
|
|
|
|
|
2023-05-11 17:17:14 -04:00
|
|
|
let maybe_package_json_deps =
|
|
|
|
options.maybe_package_json.map(|package_json| {
|
|
|
|
package_json::get_local_package_json_version_reqs(package_json)
|
|
|
|
});
|
|
|
|
let maybe_jsx_config = options
|
|
|
|
.maybe_config_file
|
|
|
|
.and_then(|cf| cf.to_maybe_jsx_import_source_config());
|
2023-01-28 10:18:32 -05:00
|
|
|
let new_resolver_config_hash = calculate_resolver_config_hash(
|
2023-05-11 17:17:14 -04:00
|
|
|
&options.enabled_urls,
|
2023-05-11 21:53:09 -04:00
|
|
|
options.document_preload_limit,
|
2023-05-11 17:17:14 -04:00
|
|
|
options.maybe_import_map.as_deref(),
|
2023-01-28 10:18:32 -05:00
|
|
|
maybe_jsx_config.as_ref(),
|
2023-02-23 10:58:10 -05:00
|
|
|
maybe_package_json_deps.as_ref(),
|
2023-01-28 10:18:32 -05:00
|
|
|
);
|
2023-05-10 20:06:59 -04:00
|
|
|
let deps_provider =
|
|
|
|
Arc::new(PackageJsonDepsProvider::new(maybe_package_json_deps));
|
2023-05-22 21:28:36 -04:00
|
|
|
let deps_installer = Arc::new(PackageJsonDepsInstaller::no_op());
|
2023-04-14 16:22:33 -04:00
|
|
|
self.resolver = Arc::new(CliGraphResolver::new(
|
2023-02-22 14:15:25 -05:00
|
|
|
maybe_jsx_config,
|
2023-05-11 17:17:14 -04:00
|
|
|
options.maybe_import_map,
|
2023-02-22 14:15:25 -05:00
|
|
|
false,
|
2023-05-11 17:17:14 -04:00
|
|
|
options.npm_registry_api,
|
|
|
|
options.npm_resolution,
|
2023-05-10 20:06:59 -04:00
|
|
|
deps_provider,
|
2023-02-24 19:35:43 -05:00
|
|
|
deps_installer,
|
2023-04-14 16:22:33 -04:00
|
|
|
));
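// Rebuild the synthetic module graph imports declared by the config file
// (for example `compilerOptions.types`) so they resolve against the new
// resolver.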
|
2021-11-18 13:50:24 -05:00
|
|
|
self.imports = Arc::new(
|
2023-02-09 22:00:23 -05:00
|
|
|
if let Some(Ok(imports)) =
|
2023-05-11 17:17:14 -04:00
|
|
|
options.maybe_config_file.map(|cf| cf.to_maybe_imports())
|
2021-11-18 13:50:24 -05:00
|
|
|
{
|
|
|
|
imports
|
|
|
|
.into_iter()
|
2023-02-09 22:00:23 -05:00
|
|
|
.map(|import| {
|
2022-08-09 17:27:22 -04:00
|
|
|
let graph_import = GraphImport::new(
|
2023-02-09 22:00:23 -05:00
|
|
|
&import.referrer,
|
|
|
|
import.imports,
|
2023-02-15 11:30:54 -05:00
|
|
|
Some(self.get_resolver()),
|
2021-11-18 13:50:24 -05:00
|
|
|
);
|
2023-02-09 22:00:23 -05:00
|
|
|
(import.referrer, graph_import)
|
2021-11-18 13:50:24 -05:00
|
|
|
})
|
|
|
|
.collect()
|
|
|
|
} else {
|
2023-03-11 11:43:45 -05:00
|
|
|
IndexMap::new()
|
2021-11-18 13:50:24 -05:00
|
|
|
},
|
|
|
|
);
|
2023-01-28 10:18:32 -05:00
|
|
|
|
|
|
|
// only refresh the dependencies if the underlying configuration has changed
|
|
|
|
if self.resolver_config_hash != new_resolver_config_hash {
|
2023-05-11 17:17:14 -04:00
|
|
|
self.refresh_dependencies(
|
|
|
|
options.enabled_urls,
|
|
|
|
options.document_preload_limit,
|
|
|
|
);
|
2023-01-28 10:18:32 -05:00
|
|
|
self.resolver_config_hash = new_resolver_config_hash;
|
|
|
|
}
|
|
|
|
|
2021-11-08 20:26:39 -05:00
|
|
|
self.dirty = true;
|
|
|
|
}
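// Re-resolves every open document with the new resolver and refreshes the
// file system documents, preloading from the enabled urls up to the
// configured limit (a limit of zero disables preloading entirely).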
|
2021-10-28 19:56:01 -04:00
|
|
|
|
2023-05-11 17:17:14 -04:00
|
|
|
fn refresh_dependencies(
|
|
|
|
&mut self,
|
|
|
|
enabled_urls: Vec<Url>,
|
|
|
|
document_preload_limit: usize,
|
|
|
|
) {
|
2023-02-15 11:30:54 -05:00
|
|
|
let resolver = self.resolver.as_graph_resolver();
|
2023-01-28 10:18:32 -05:00
|
|
|
for doc in self.open_docs.values_mut() {
|
2023-02-15 11:30:54 -05:00
|
|
|
if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
|
2023-01-28 10:18:32 -05:00
|
|
|
*doc = new_doc;
|
|
|
|
}
|
|
|
|
}
|
2023-03-30 17:47:53 -04:00
|
|
|
|
|
|
|
// update the file system documents
|
|
|
|
let mut fs_docs = self.file_system_docs.lock();
|
2023-05-11 17:17:14 -04:00
|
|
|
if document_preload_limit > 0 {
|
|
|
|
let mut not_found_docs =
|
|
|
|
fs_docs.docs.keys().cloned().collect::<HashSet<_>>();
|
|
|
|
let open_docs = &mut self.open_docs;
|
|
|
|
|
|
|
|
log::debug!("Preloading documents from enabled urls...");
|
|
|
|
let mut finder = PreloadDocumentFinder::from_enabled_urls_with_limit(
|
|
|
|
&enabled_urls,
|
|
|
|
document_preload_limit,
|
|
|
|
);
|
|
|
|
for specifier in finder.by_ref() {
|
|
|
|
// mark this document as having been found
|
|
|
|
not_found_docs.remove(&specifier);
|
2023-04-01 12:02:44 -04:00
|
|
|
|
2023-05-11 17:17:14 -04:00
|
|
|
if !open_docs.contains_key(&specifier)
|
|
|
|
&& !fs_docs.docs.contains_key(&specifier)
|
|
|
|
{
|
|
|
|
fs_docs.refresh_document(&self.cache, resolver, &specifier);
|
|
|
|
} else {
|
|
|
|
// update the existing entry to have the new resolver
|
|
|
|
if let Some(doc) = fs_docs.docs.get_mut(&specifier) {
|
|
|
|
if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
|
|
|
|
*doc = new_doc;
|
2023-04-01 12:02:44 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-05-11 17:17:14 -04:00
|
|
|
}
|
2023-03-30 17:47:53 -04:00
|
|
|
|
2023-05-11 17:17:14 -04:00
|
|
|
if finder.hit_limit() {
|
|
|
|
lsp_warn!(
|
|
|
|
concat!(
|
|
|
|
"Hit the language server document preload limit of {} file system entries. ",
|
|
|
|
"You may want to use the \"deno.enablePaths\" configuration setting to only have Deno ",
|
|
|
|
"partially enable a workspace or increase the limit via \"deno.documentPreloadLimit\". ",
|
|
|
|
"In cases where Deno ends up using too much memory, you may want to lower the limit."
|
|
|
|
),
|
|
|
|
document_preload_limit,
|
|
|
|
);
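// As an illustration, a user hitting this limit could adjust their editor
// settings along these lines (the values below are only examples):
//
//   {
//     "deno.enablePaths": ["./src"],
//     "deno.documentPreloadLimit": 2000
//   }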
|
|
|
|
|
|
|
|
// since we hit the limit, just update everything to use the new resolver
|
|
|
|
for uri in not_found_docs {
|
|
|
|
if let Some(doc) = fs_docs.docs.get_mut(&uri) {
|
|
|
|
if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
|
|
|
|
*doc = new_doc;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} else {
|
2023-04-01 12:02:44 -04:00
|
|
|
// clean up and remove any documents that weren't found
|
|
|
|
for uri in not_found_docs {
|
|
|
|
fs_docs.docs.remove(&uri);
|
|
|
|
}
|
|
|
|
}
|
2023-05-11 17:17:14 -04:00
|
|
|
} else {
|
|
|
|
// This log statement is used in the tests to ensure preloading doesn't
// happen. Preloading is not useful in the repl and could be very expensive
// if the repl is launched from a directory with a lot of descendants.
|
|
|
|
log::debug!("Skipping document preload.");
|
|
|
|
|
|
|
|
// just update to use the new resolver
|
|
|
|
for doc in fs_docs.docs.values_mut() {
|
|
|
|
if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
|
|
|
|
*doc = new_doc;
|
2023-03-30 17:47:53 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-05-11 17:17:14 -04:00
|
|
|
|
2023-03-30 17:47:53 -04:00
|
|
|
fs_docs.dirty = true;
|
2023-01-28 10:18:32 -05:00
|
|
|
}
|
|
|
|
|
2021-11-18 13:50:24 -05:00
|
|
|
/// Iterate through the documents, building a map where the key is a unique
|
|
|
|
/// document and the value is a set of specifiers that depend on that
|
|
|
|
/// document.
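/// For example, if `a.ts` imports `b.ts`, the resulting map will contain an
/// entry keyed by `b.ts` whose value set includes `a.ts`.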
|
|
|
|
fn calculate_dependents_if_dirty(&mut self) {
|
2022-11-29 19:32:18 -05:00
|
|
|
#[derive(Default)]
|
|
|
|
struct DocAnalyzer {
|
|
|
|
dependents_map: HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>>,
|
|
|
|
analyzed_specifiers: HashSet<ModuleSpecifier>,
|
|
|
|
pending_specifiers: VecDeque<ModuleSpecifier>,
|
|
|
|
npm_reqs: HashSet<NpmPackageReq>,
|
2023-01-24 09:05:54 -05:00
|
|
|
has_node_builtin_specifier: bool,
|
2021-11-18 13:50:24 -05:00
|
|
|
}
|
2021-10-28 19:56:01 -04:00
|
|
|
|
2022-11-29 19:32:18 -05:00
|
|
|
impl DocAnalyzer {
|
|
|
|
fn add(&mut self, dep: &ModuleSpecifier, specifier: &ModuleSpecifier) {
|
|
|
|
if !self.analyzed_specifiers.contains(dep) {
|
|
|
|
self.analyzed_specifiers.insert(dep.clone());
|
|
|
|
// perf: only queue the specifier if it has never been analyzed so that we
// don't cause an extra file system lookup
|
|
|
|
self.pending_specifiers.push_back(dep.clone());
|
2023-02-21 12:03:48 -05:00
|
|
|
if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
|
2022-11-29 19:32:18 -05:00
|
|
|
self.npm_reqs.insert(reference.req);
|
|
|
|
}
|
2021-11-18 13:50:24 -05:00
|
|
|
}
|
2022-11-29 19:32:18 -05:00
|
|
|
|
|
|
|
self
|
|
|
|
.dependents_map
|
2022-10-21 11:20:18 -04:00
|
|
|
.entry(dep.clone())
|
|
|
|
.or_default()
|
|
|
|
.insert(specifier.clone());
|
|
|
|
}
|
2022-11-29 19:32:18 -05:00
|
|
|
|
|
|
|
fn analyze_doc(&mut self, specifier: &ModuleSpecifier, doc: &Document) {
|
|
|
|
self.analyzed_specifiers.insert(specifier.clone());
|
2023-01-24 09:05:54 -05:00
|
|
|
for (name, dependency) in doc.dependencies() {
|
|
|
|
if !self.has_node_builtin_specifier && name.starts_with("node:") {
|
|
|
|
self.has_node_builtin_specifier = true;
|
|
|
|
}
|
|
|
|
|
2022-11-29 19:32:18 -05:00
|
|
|
if let Some(dep) = dependency.get_code() {
|
|
|
|
self.add(dep, specifier);
|
|
|
|
}
|
|
|
|
if let Some(dep) = dependency.get_type() {
|
|
|
|
self.add(dep, specifier);
|
|
|
|
}
|
|
|
|
}
|
2023-02-09 22:00:23 -05:00
|
|
|
if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() {
|
|
|
|
self.add(dep, specifier);
|
2022-11-29 19:32:18 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut file_system_docs = self.file_system_docs.lock();
|
|
|
|
if !file_system_docs.dirty && !self.dirty {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut doc_analyzer = DocAnalyzer::default();
|
|
|
|
// favor documents that are open in case a document exists in both collections
|
|
|
|
let documents = file_system_docs.docs.iter().chain(self.open_docs.iter());
|
|
|
|
for (specifier, doc) in documents {
|
|
|
|
doc_analyzer.analyze_doc(specifier, doc);
|
2022-10-21 11:20:18 -04:00
|
|
|
}
|
2022-11-29 19:32:18 -05:00
|
|
|
|
2023-02-15 11:30:54 -05:00
|
|
|
let resolver = self.get_resolver();
|
2022-11-29 19:32:18 -05:00
|
|
|
while let Some(specifier) = doc_analyzer.pending_specifiers.pop_front() {
|
2023-02-15 11:30:54 -05:00
|
|
|
if let Some(doc) = file_system_docs.get(&self.cache, resolver, &specifier)
|
2022-11-29 19:32:18 -05:00
|
|
|
{
|
|
|
|
doc_analyzer.analyze_doc(&specifier, &doc);
|
2022-10-21 11:20:18 -04:00
|
|
|
}
|
2021-11-18 13:50:24 -05:00
|
|
|
}
|
2022-11-29 19:32:18 -05:00
|
|
|
|
2023-01-24 09:05:54 -05:00
|
|
|
let mut npm_reqs = doc_analyzer.npm_reqs;
|
|
|
|
// Ensure a @types/node package exists when any module uses a node: specifier.
|
|
|
|
// Unlike on the command line, here we just add @types/node to the npm package
|
|
|
|
// requirements since this won't end up in the lockfile.
|
|
|
|
self.has_injected_types_node_package = doc_analyzer
|
|
|
|
.has_node_builtin_specifier
|
|
|
|
&& !npm_reqs.iter().any(|r| r.name == "@types/node");
|
|
|
|
if self.has_injected_types_node_package {
|
|
|
|
npm_reqs.insert(NpmPackageReq::from_str("@types/node").unwrap());
|
|
|
|
}
|
|
|
|
|
2022-11-29 19:32:18 -05:00
|
|
|
self.dependents_map = Arc::new(doc_analyzer.dependents_map);
|
2023-02-23 10:58:10 -05:00
|
|
|
self.npm_specifier_reqs = Arc::new({
|
|
|
|
let mut reqs = npm_reqs.into_iter().collect::<Vec<_>>();
|
|
|
|
reqs.sort();
|
|
|
|
reqs
|
|
|
|
});
|
2021-11-18 13:50:24 -05:00
|
|
|
self.dirty = false;
|
|
|
|
file_system_docs.dirty = false;
|
2021-11-12 11:42:04 -05:00
|
|
|
}
|
|
|
|
|
2023-02-15 11:30:54 -05:00
|
|
|
fn get_resolver(&self) -> &dyn deno_graph::source::Resolver {
|
|
|
|
self.resolver.as_graph_resolver()
|
2021-10-28 19:56:01 -04:00
|
|
|
}
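// Resolves a dependency specifier to the specifier and media type that should
// actually be consumed, following `npm:` requirements and any types dependency
// of the resolved document.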
|
|
|
|
|
2021-11-18 13:50:24 -05:00
|
|
|
fn resolve_dependency(
|
2021-10-28 19:56:01 -04:00
|
|
|
&self,
|
|
|
|
specifier: &ModuleSpecifier,
|
2023-04-24 19:44:35 -04:00
|
|
|
maybe_node_resolver: Option<&Arc<NodeResolver>>,
|
2021-11-18 13:50:24 -05:00
|
|
|
) -> Option<(ModuleSpecifier, MediaType)> {
|
2023-02-21 12:03:48 -05:00
|
|
|
if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(specifier) {
|
2023-04-17 15:36:23 -04:00
|
|
|
return node_resolve_npm_req_ref(npm_ref, maybe_node_resolver);
|
2022-10-21 11:20:18 -04:00
|
|
|
}
|
2021-11-18 13:50:24 -05:00
|
|
|
let doc = self.get(specifier)?;
|
2023-02-22 14:15:25 -05:00
|
|
|
let maybe_module = doc.maybe_esm_module().and_then(|r| r.as_ref().ok());
|
2023-02-09 22:00:23 -05:00
|
|
|
let maybe_types_dependency = maybe_module
|
|
|
|
.and_then(|m| m.maybe_types_dependency.as_ref().map(|d| &d.dependency));
|
|
|
|
if let Some(specifier) =
|
|
|
|
maybe_types_dependency.and_then(|d| d.maybe_specifier())
|
|
|
|
{
|
2023-04-17 15:36:23 -04:00
|
|
|
self.resolve_dependency(specifier, maybe_node_resolver)
|
2021-11-18 13:50:24 -05:00
|
|
|
} else {
|
|
|
|
let media_type = doc.media_type();
|
|
|
|
Some((specifier.clone(), media_type))
|
|
|
|
}
|
2021-10-28 19:56:01 -04:00
|
|
|
}
|
|
|
|
|
2021-11-18 13:50:24 -05:00
|
|
|
/// Iterate through any "imported" modules, checking to see if a dependency
|
|
|
|
/// is available. This is used to provide "global" imports like the JSX import
|
|
|
|
/// source.
|
2023-02-09 22:00:23 -05:00
|
|
|
fn resolve_imports_dependency(&self, specifier: &str) -> Option<&Resolution> {
|
2022-08-09 17:27:22 -04:00
|
|
|
for graph_imports in self.imports.values() {
|
|
|
|
let maybe_dep = graph_imports.dependencies.get(specifier);
|
2021-11-18 13:50:24 -05:00
|
|
|
if maybe_dep.is_some() {
|
2022-01-31 17:33:57 -05:00
|
|
|
return maybe_dep.map(|d| &d.maybe_type);
|
2021-11-18 13:50:24 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
None
|
2021-11-08 20:26:39 -05:00
|
|
|
}
|
2021-01-22 05:03:16 -05:00
|
|
|
}
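// Resolves an npm package requirement reference (e.g. `npm:chalk@5`) through
// the node resolver, preferring type declarations; returns `None` when no
// node resolver is available.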
|
|
|
|
|
2023-02-22 14:15:25 -05:00
|
|
|
fn node_resolve_npm_req_ref(
|
|
|
|
npm_req_ref: NpmPackageReqReference,
|
2023-04-24 19:44:35 -04:00
|
|
|
maybe_node_resolver: Option<&Arc<NodeResolver>>,
|
2023-02-22 14:15:25 -05:00
|
|
|
) -> Option<(ModuleSpecifier, MediaType)> {
|
2023-04-17 15:36:23 -04:00
|
|
|
maybe_node_resolver.map(|node_resolver| {
|
2023-02-22 14:15:25 -05:00
|
|
|
NodeResolution::into_specifier_and_media_type(
|
2023-04-17 15:36:23 -04:00
|
|
|
node_resolver
|
2023-04-24 19:44:35 -04:00
|
|
|
.resolve_npm_req_reference(
|
2023-04-17 15:36:23 -04:00
|
|
|
&npm_req_ref,
|
|
|
|
NodeResolutionMode::Types,
|
2023-04-24 21:07:48 -04:00
|
|
|
&PermissionsContainer::allow_all(),
|
2023-04-17 15:36:23 -04:00
|
|
|
)
|
2023-02-22 14:15:25 -05:00
|
|
|
.ok()
|
2023-04-17 15:36:23 -04:00
|
|
|
.flatten(),
|
2023-02-22 14:15:25 -05:00
|
|
|
)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2022-10-20 13:23:21 -04:00
|
|
|
/// Loader that will look at the open documents.
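/// If a `file:` specifier is currently open in the editor, its in-memory
/// contents are returned instead of being loaded from disk; otherwise the
/// request is delegated to the inner loader.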
|
2022-10-21 11:20:18 -04:00
|
|
|
pub struct OpenDocumentsGraphLoader<'a> {
|
2022-10-20 13:23:21 -04:00
|
|
|
pub inner_loader: &'a mut dyn deno_graph::source::Loader,
|
|
|
|
pub open_docs: &'a HashMap<ModuleSpecifier, Document>,
|
|
|
|
}
|
|
|
|
|
2022-10-21 11:20:18 -04:00
|
|
|
impl<'a> deno_graph::source::Loader for OpenDocumentsGraphLoader<'a> {
|
2022-10-20 13:23:21 -04:00
|
|
|
fn load(
|
|
|
|
&mut self,
|
|
|
|
specifier: &ModuleSpecifier,
|
|
|
|
is_dynamic: bool,
|
|
|
|
) -> deno_graph::source::LoadFuture {
|
|
|
|
if specifier.scheme() == "file" {
|
|
|
|
if let Some(doc) = self.open_docs.get(specifier) {
|
|
|
|
return Box::pin(future::ready(Ok(Some(
|
|
|
|
deno_graph::source::LoadResponse::Module {
|
|
|
|
content: doc.content(),
|
|
|
|
specifier: doc.specifier().clone(),
|
|
|
|
maybe_headers: None,
|
|
|
|
},
|
|
|
|
))));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
self.inner_loader.load(specifier, is_dynamic)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-01-28 10:18:32 -05:00
|
|
|
fn parse_and_analyze_module(
|
|
|
|
specifier: &ModuleSpecifier,
|
|
|
|
text_info: SourceTextInfo,
|
|
|
|
maybe_headers: Option<&HashMap<String, String>>,
|
2023-02-15 11:30:54 -05:00
|
|
|
resolver: &dyn deno_graph::source::Resolver,
|
2023-01-28 10:18:32 -05:00
|
|
|
) -> (Option<ParsedSourceResult>, Option<ModuleResult>) {
|
|
|
|
let parsed_source_result = parse_source(specifier, text_info, maybe_headers);
|
2023-02-15 11:30:54 -05:00
|
|
|
let module_result =
|
|
|
|
analyze_module(specifier, &parsed_source_result, maybe_headers, resolver);
|
2023-01-28 10:18:32 -05:00
|
|
|
(Some(parsed_source_result), Some(module_result))
|
|
|
|
}
|
2022-08-22 12:14:59 -04:00
|
|
|
|
2023-01-28 10:18:32 -05:00
|
|
|
fn parse_source(
|
|
|
|
specifier: &ModuleSpecifier,
|
|
|
|
text_info: SourceTextInfo,
|
|
|
|
maybe_headers: Option<&HashMap<String, String>>,
|
|
|
|
) -> ParsedSourceResult {
|
|
|
|
deno_ast::parse_module(deno_ast::ParseParams {
|
|
|
|
specifier: specifier.to_string(),
|
|
|
|
text_info,
|
|
|
|
media_type: MediaType::from_specifier_and_headers(specifier, maybe_headers),
|
|
|
|
capture_tokens: true,
|
|
|
|
scope_analysis: true,
|
|
|
|
maybe_syntax: None,
|
|
|
|
})
|
2022-08-22 12:14:59 -04:00
|
|
|
}
|
|
|
|
|
2023-01-28 10:18:32 -05:00
|
|
|
fn analyze_module(
|
2022-08-22 12:14:59 -04:00
|
|
|
specifier: &ModuleSpecifier,
|
2023-01-28 10:18:32 -05:00
|
|
|
parsed_source_result: &ParsedSourceResult,
|
2022-08-22 12:14:59 -04:00
|
|
|
maybe_headers: Option<&HashMap<String, String>>,
|
2023-02-15 11:30:54 -05:00
|
|
|
resolver: &dyn deno_graph::source::Resolver,
|
2023-01-28 10:18:32 -05:00
|
|
|
) -> ModuleResult {
|
|
|
|
match parsed_source_result {
|
2023-02-03 14:10:51 -05:00
|
|
|
Ok(parsed_source) => Ok(deno_graph::parse_module_from_ast(
|
|
|
|
specifier,
|
|
|
|
maybe_headers,
|
|
|
|
parsed_source,
|
2023-02-15 11:30:54 -05:00
|
|
|
Some(resolver),
|
2023-02-03 14:10:51 -05:00
|
|
|
)),
|
2023-03-21 11:46:40 -04:00
|
|
|
Err(err) => Err(deno_graph::ModuleGraphError::ModuleError(
|
|
|
|
deno_graph::ModuleError::ParseErr(specifier.clone(), err.clone()),
|
2022-08-22 12:14:59 -04:00
|
|
|
)),
|
2023-01-28 10:18:32 -05:00
|
|
|
}
|
2022-08-22 12:14:59 -04:00
|
|
|
}
|
|
|
|
|
2023-04-01 15:10:30 -04:00
|
|
|
enum PendingEntry {
|
|
|
|
/// File specified as a root url.
|
|
|
|
SpecifiedRootFile(PathBuf),
|
|
|
|
/// Directory that is queued to read.
|
|
|
|
Dir(PathBuf),
|
|
|
|
/// The current directory being read.
|
|
|
|
ReadDir(Box<ReadDir>),
|
|
|
|
}
|
|
|
|
|
2023-03-30 17:47:53 -04:00
|
|
|
/// Iterator that finds documents that can be preloaded into
|
|
|
|
/// the LSP on startup.
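/// Directories are traversed breadth first, directories such as
/// `node_modules`, `.git`, and cargo `target` output directories are skipped,
/// and iteration stops once the configured file system entry limit is reached.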
|
|
|
|
struct PreloadDocumentFinder {
|
2023-05-11 17:17:14 -04:00
|
|
|
limit: usize,
|
|
|
|
entry_count: usize,
|
2023-04-01 15:10:30 -04:00
|
|
|
pending_entries: VecDeque<PendingEntry>,
|
2023-03-30 17:47:53 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
impl PreloadDocumentFinder {
|
2023-04-01 15:10:30 -04:00
|
|
|
pub fn from_enabled_urls_with_limit(
|
|
|
|
enabled_urls: &Vec<Url>,
|
2023-05-11 17:17:14 -04:00
|
|
|
limit: usize,
|
2023-04-01 15:10:30 -04:00
|
|
|
) -> Self {
|
|
|
|
fn is_allowed_root_dir(dir_path: &Path) -> bool {
|
|
|
|
if dir_path.parent().is_none() {
|
|
|
|
// never search the root directory of a drive
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
true
|
|
|
|
}
|
|
|
|
|
2023-03-30 17:47:53 -04:00
|
|
|
let mut finder = PreloadDocumentFinder {
|
2023-04-01 15:10:30 -04:00
|
|
|
limit,
|
|
|
|
entry_count: 0,
|
|
|
|
pending_entries: Default::default(),
|
2023-03-30 17:47:53 -04:00
|
|
|
};
|
2023-04-01 15:10:30 -04:00
|
|
|
let mut dirs = Vec::with_capacity(enabled_urls.len());
|
|
|
|
for enabled_url in enabled_urls {
|
|
|
|
if let Ok(path) = enabled_url.to_file_path() {
|
2023-03-30 17:47:53 -04:00
|
|
|
if path.is_dir() {
|
2023-04-01 15:10:30 -04:00
|
|
|
if is_allowed_root_dir(&path) {
|
|
|
|
dirs.push(path);
|
2023-04-01 12:02:44 -04:00
|
|
|
}
|
2023-03-30 17:47:53 -04:00
|
|
|
} else {
|
2023-04-01 15:10:30 -04:00
|
|
|
finder
|
|
|
|
.pending_entries
|
|
|
|
.push_back(PendingEntry::SpecifiedRootFile(path));
|
2023-03-30 17:47:53 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-04-01 15:10:30 -04:00
|
|
|
for dir in sort_and_remove_non_leaf_dirs(dirs) {
|
|
|
|
finder.pending_entries.push_back(PendingEntry::Dir(dir));
|
2023-03-30 17:47:53 -04:00
|
|
|
}
|
2023-04-01 15:10:30 -04:00
|
|
|
finder
|
2023-03-30 17:47:53 -04:00
|
|
|
}
|
|
|
|
|
2023-05-11 17:17:14 -04:00
|
|
|
pub fn hit_limit(&self) -> bool {
|
|
|
|
self.entry_count >= self.limit
|
|
|
|
}
|
|
|
|
|
2023-03-30 17:47:53 -04:00
|
|
|
fn get_valid_specifier(path: &Path) -> Option<ModuleSpecifier> {
|
2023-04-01 15:10:30 -04:00
|
|
|
fn is_allowed_media_type(media_type: MediaType) -> bool {
|
2023-03-30 17:47:53 -04:00
|
|
|
match media_type {
|
|
|
|
MediaType::JavaScript
|
|
|
|
| MediaType::Jsx
|
|
|
|
| MediaType::Mjs
|
|
|
|
| MediaType::Cjs
|
|
|
|
| MediaType::TypeScript
|
|
|
|
| MediaType::Mts
|
|
|
|
| MediaType::Cts
|
|
|
|
| MediaType::Dts
|
|
|
|
| MediaType::Dmts
|
|
|
|
| MediaType::Dcts
|
|
|
|
| MediaType::Tsx => true,
|
|
|
|
MediaType::Json // ignore because json never depends on other files
|
|
|
|
| MediaType::Wasm
|
|
|
|
| MediaType::SourceMap
|
|
|
|
| MediaType::TsBuildInfo
|
|
|
|
| MediaType::Unknown => false,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let media_type = MediaType::from_path(path);
|
2023-04-01 15:10:30 -04:00
|
|
|
if is_allowed_media_type(media_type) {
|
2023-03-30 17:47:53 -04:00
|
|
|
if let Ok(specifier) = ModuleSpecifier::from_file_path(path) {
|
|
|
|
return Some(specifier);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Iterator for PreloadDocumentFinder {
|
|
|
|
type Item = ModuleSpecifier;
|
|
|
|
|
|
|
|
fn next(&mut self) -> Option<Self::Item> {
|
2023-04-01 15:10:30 -04:00
|
|
|
fn is_discoverable_dir(dir_path: &Path) -> bool {
|
|
|
|
if let Some(dir_name) = dir_path.file_name() {
|
|
|
|
let dir_name = dir_name.to_string_lossy().to_lowercase();
|
|
|
|
// We ignore these directories by default because there is a
|
|
|
|
// high likelihood they aren't relevant. Someone can opt-into
|
|
|
|
// them by specifying one of them as an enabled path.
|
|
|
|
if matches!(dir_name.as_str(), "node_modules" | ".git") {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
// ignore cargo target directories for anyone using Deno with Rust
|
|
|
|
if dir_name == "target"
|
|
|
|
&& dir_path
|
|
|
|
.parent()
|
|
|
|
.map(|p| p.join("Cargo.toml").exists())
|
|
|
|
.unwrap_or(false)
|
|
|
|
{
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
true
|
|
|
|
} else {
|
|
|
|
false
|
2023-03-30 17:47:53 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-01 15:10:30 -04:00
|
|
|
fn is_discoverable_file(file_path: &Path) -> bool {
|
|
|
|
// Don't auto-discover minified files as they are likely to be very large
|
|
|
|
// and unlikely to have dependencies on code outside them that would
|
|
|
|
// be useful in the LSP
|
|
|
|
if let Some(file_name) = file_path.file_name() {
|
|
|
|
let file_name = file_name.to_string_lossy().to_lowercase();
|
|
|
|
!file_name.as_str().contains(".min.")
|
|
|
|
} else {
|
|
|
|
false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
while let Some(entry) = self.pending_entries.pop_front() {
|
|
|
|
match entry {
|
|
|
|
PendingEntry::SpecifiedRootFile(file) => {
|
|
|
|
// since it was a file that was specified as a root url, only
|
|
|
|
// verify that it's valid
|
|
|
|
if let Some(specifier) = Self::get_valid_specifier(&file) {
|
|
|
|
return Some(specifier);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
PendingEntry::Dir(dir_path) => {
|
|
|
|
if let Ok(read_dir) = fs::read_dir(&dir_path) {
|
|
|
|
self
|
|
|
|
.pending_entries
|
|
|
|
.push_back(PendingEntry::ReadDir(Box::new(read_dir)));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
PendingEntry::ReadDir(mut entries) => {
|
|
|
|
while let Some(entry) = entries.next() {
|
|
|
|
self.entry_count += 1;
|
|
|
|
|
2023-05-11 17:17:14 -04:00
|
|
|
if self.hit_limit() {
|
2023-04-01 15:10:30 -04:00
|
|
|
self.pending_entries.clear(); // stop searching
|
|
|
|
return None;
|
|
|
|
}
|
|
|
|
|
|
|
|
if let Ok(entry) = entry {
|
|
|
|
let path = entry.path();
|
|
|
|
if let Ok(file_type) = entry.file_type() {
|
|
|
|
if file_type.is_dir() && is_discoverable_dir(&path) {
|
|
|
|
self
|
|
|
|
.pending_entries
|
|
|
|
.push_back(PendingEntry::Dir(path.to_path_buf()));
|
|
|
|
} else if file_type.is_file() && is_discoverable_file(&path) {
|
|
|
|
if let Some(specifier) = Self::get_valid_specifier(&path) {
|
|
|
|
// requeue the remaining entries so iteration can resume on the next call
|
|
|
|
self
|
|
|
|
.pending_entries
|
|
|
|
.push_front(PendingEntry::ReadDir(entries));
|
|
|
|
return Some(specifier);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-03-30 17:47:53 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-04-01 15:10:30 -04:00
|
|
|
|
2023-03-30 17:47:53 -04:00
|
|
|
None
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-01 15:10:30 -04:00
|
|
|
/// Removes any directories that are descendants of another directory in the collection.
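/// For example, `["/a/", "/a/b/", "/c/"]` is reduced to `["/a/", "/c/"]`.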
|
|
|
|
fn sort_and_remove_non_leaf_dirs(mut dirs: Vec<PathBuf>) -> Vec<PathBuf> {
|
|
|
|
if dirs.is_empty() {
|
|
|
|
return dirs;
|
|
|
|
}
|
|
|
|
|
|
|
|
dirs.sort();
|
|
|
|
if !dirs.is_empty() {
|
|
|
|
for i in (0..dirs.len() - 1).rev() {
|
|
|
|
let prev = &dirs[i + 1];
|
|
|
|
if prev.starts_with(&dirs[i]) {
|
|
|
|
dirs.remove(i + 1);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
dirs
|
|
|
|
}
|
|
|
|
|
2021-01-22 05:03:16 -05:00
|
|
|
#[cfg(test)]
|
|
|
|
mod tests {
|
2023-02-22 14:15:25 -05:00
|
|
|
use crate::npm::NpmResolution;
|
|
|
|
|
2021-01-22 05:03:16 -05:00
|
|
|
use super::*;
|
2023-01-28 10:18:32 -05:00
|
|
|
use import_map::ImportMap;
|
2023-03-30 17:47:53 -04:00
|
|
|
use pretty_assertions::assert_eq;
|
2023-06-10 11:09:45 -04:00
|
|
|
use test_util::PathRef;
|
2022-04-01 11:15:37 -04:00
|
|
|
use test_util::TempDir;
|
2021-01-22 05:03:16 -05:00
|
|
|
|
2023-06-10 11:09:45 -04:00
|
|
|
fn setup(temp_dir: &TempDir) -> (Documents, PathRef) {
|
2021-10-28 19:56:01 -04:00
|
|
|
let location = temp_dir.path().join("deps");
|
2023-06-10 11:09:45 -04:00
|
|
|
let documents = Documents::new(location.to_path_buf());
|
2021-10-28 19:56:01 -04:00
|
|
|
(documents, location)
|
2021-08-18 23:19:12 -04:00
|
|
|
}
|
|
|
|
|
2021-01-22 05:03:16 -05:00
|
|
|
#[test]
|
2021-10-28 19:56:01 -04:00
|
|
|
fn test_documents_open() {
|
2022-04-01 11:15:37 -04:00
|
|
|
let temp_dir = TempDir::new();
|
|
|
|
let (mut documents, _) = setup(&temp_dir);
|
2021-10-28 19:56:01 -04:00
|
|
|
let specifier = ModuleSpecifier::parse("file:///a.ts").unwrap();
|
2022-05-20 16:40:55 -04:00
|
|
|
let content = r#"import * as b from "./b.ts";
|
2021-10-28 19:56:01 -04:00
|
|
|
console.log(b);
|
2022-05-20 16:40:55 -04:00
|
|
|
"#;
|
|
|
|
let document = documents.open(
|
|
|
|
specifier,
|
|
|
|
1,
|
|
|
|
"javascript".parse().unwrap(),
|
|
|
|
content.into(),
|
2021-06-02 06:29:58 -04:00
|
|
|
);
|
2021-11-12 11:42:04 -05:00
|
|
|
assert!(document.is_open());
|
|
|
|
assert!(document.is_diagnosable());
|
2021-01-22 05:03:16 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
|
2021-10-28 19:56:01 -04:00
|
|
|
fn test_documents_change() {
|
2022-04-01 11:15:37 -04:00
|
|
|
let temp_dir = TempDir::new();
|
|
|
|
let (mut documents, _) = setup(&temp_dir);
|
2021-10-28 19:56:01 -04:00
|
|
|
let specifier = ModuleSpecifier::parse("file:///a.ts").unwrap();
|
2022-05-20 16:40:55 -04:00
|
|
|
let content = r#"import * as b from "./b.ts";
|
2021-10-28 19:56:01 -04:00
|
|
|
console.log(b);
|
2022-05-20 16:40:55 -04:00
|
|
|
"#;
|
2021-10-28 19:56:01 -04:00
|
|
|
documents.open(
|
2021-06-02 06:29:58 -04:00
|
|
|
specifier.clone(),
|
|
|
|
1,
|
2021-10-28 19:56:01 -04:00
|
|
|
"javascript".parse().unwrap(),
|
2022-05-20 16:40:55 -04:00
|
|
|
content.into(),
|
2021-06-02 06:29:58 -04:00
|
|
|
);
|
2021-10-28 19:56:01 -04:00
|
|
|
documents
|
2021-01-22 05:03:16 -05:00
|
|
|
.change(
|
|
|
|
&specifier,
|
|
|
|
2,
|
2021-01-29 14:34:33 -05:00
|
|
|
vec![lsp::TextDocumentContentChangeEvent {
|
|
|
|
range: Some(lsp::Range {
|
|
|
|
start: lsp::Position {
|
2021-10-28 19:56:01 -04:00
|
|
|
line: 1,
|
|
|
|
character: 13,
|
2021-01-22 05:03:16 -05:00
|
|
|
},
|
2021-01-29 14:34:33 -05:00
|
|
|
end: lsp::Position {
|
2021-10-28 19:56:01 -04:00
|
|
|
line: 1,
|
|
|
|
character: 13,
|
2021-01-22 05:03:16 -05:00
|
|
|
},
|
|
|
|
}),
|
2021-10-28 19:56:01 -04:00
|
|
|
range_length: None,
|
|
|
|
text: r#", "hello deno""#.to_string(),
|
2021-01-22 05:03:16 -05:00
|
|
|
}],
|
|
|
|
)
|
2021-10-28 19:56:01 -04:00
|
|
|
.unwrap();
|
|
|
|
assert_eq!(
|
2022-05-20 16:40:55 -04:00
|
|
|
&*documents.get(&specifier).unwrap().content(),
|
2021-10-28 19:56:01 -04:00
|
|
|
r#"import * as b from "./b.ts";
|
|
|
|
console.log(b, "hello deno");
|
|
|
|
"#
|
2021-08-18 23:19:12 -04:00
|
|
|
);
|
2021-06-02 06:29:58 -04:00
|
|
|
}
|
2021-11-18 13:50:24 -05:00
|
|
|
|
|
|
|
#[test]
|
|
|
|
fn test_documents_ensure_no_duplicates() {
|
|
|
|
// it should never happen that a user of this API causes this to happen,
|
|
|
|
// but we'll guard against it anyway
|
2022-04-01 11:15:37 -04:00
|
|
|
let temp_dir = TempDir::new();
|
|
|
|
let (mut documents, documents_path) = setup(&temp_dir);
|
2021-11-18 13:50:24 -05:00
|
|
|
let file_path = documents_path.join("file.ts");
|
|
|
|
let file_specifier = ModuleSpecifier::from_file_path(&file_path).unwrap();
|
2023-06-10 11:09:45 -04:00
|
|
|
documents_path.create_dir_all();
|
|
|
|
file_path.write("");
|
2021-11-18 13:50:24 -05:00
|
|
|
|
|
|
|
// open the document
|
|
|
|
documents.open(
|
|
|
|
file_specifier.clone(),
|
|
|
|
1,
|
|
|
|
LanguageId::TypeScript,
|
2022-05-20 16:40:55 -04:00
|
|
|
"".into(),
|
2021-11-18 13:50:24 -05:00
|
|
|
);
|
|
|
|
|
|
|
|
// make a clone of the document store and close the document in that one
|
|
|
|
let mut documents2 = documents.clone();
|
|
|
|
documents2.close(&file_specifier).unwrap();
|
|
|
|
|
|
|
|
// At this point the document will be in both documents and the shared file system documents.
|
|
|
|
// Now make sure that the original documents doesn't return both copies
|
2023-03-29 16:25:48 -04:00
|
|
|
assert_eq!(documents.documents(DocumentsFilter::All).len(), 1);
|
2021-11-18 13:50:24 -05:00
|
|
|
}
|
2023-01-28 10:18:32 -05:00
|
|
|
|
|
|
|
#[test]
|
|
|
|
fn test_documents_refresh_dependencies_config_change() {
|
2023-04-14 16:22:33 -04:00
|
|
|
let npm_registry_api = Arc::new(CliNpmRegistryApi::new_uninitialized());
|
|
|
|
let npm_resolution = Arc::new(NpmResolution::from_serialized(
|
|
|
|
npm_registry_api.clone(),
|
|
|
|
None,
|
|
|
|
None,
|
|
|
|
));
|
2023-02-22 14:15:25 -05:00
|
|
|
|
2023-01-28 10:18:32 -05:00
|
|
|
// verify that changing the configuration (here, the import map) causes the
// dependencies of already open documents to be re-resolved
|
|
|
|
let temp_dir = TempDir::new();
|
|
|
|
let (mut documents, documents_path) = setup(&temp_dir);
|
|
|
|
fs::create_dir_all(&documents_path).unwrap();
|
|
|
|
|
|
|
|
let file1_path = documents_path.join("file1.ts");
|
|
|
|
let file1_specifier = ModuleSpecifier::from_file_path(&file1_path).unwrap();
|
|
|
|
fs::write(&file1_path, "").unwrap();
|
|
|
|
|
|
|
|
let file2_path = documents_path.join("file2.ts");
|
|
|
|
let file2_specifier = ModuleSpecifier::from_file_path(&file2_path).unwrap();
|
|
|
|
fs::write(&file2_path, "").unwrap();
|
|
|
|
|
|
|
|
let file3_path = documents_path.join("file3.ts");
|
|
|
|
let file3_specifier = ModuleSpecifier::from_file_path(&file3_path).unwrap();
|
|
|
|
fs::write(&file3_path, "").unwrap();
|
|
|
|
|
|
|
|
// set the initial import map and point to file 2
|
|
|
|
{
|
|
|
|
let mut import_map = ImportMap::new(
|
|
|
|
ModuleSpecifier::from_file_path(documents_path.join("import_map.json"))
|
|
|
|
.unwrap(),
|
|
|
|
);
|
|
|
|
import_map
|
|
|
|
.imports_mut()
|
|
|
|
.append("test".to_string(), "./file2.ts".to_string())
|
|
|
|
.unwrap();
|
|
|
|
|
2023-05-11 17:17:14 -04:00
|
|
|
documents.update_config(UpdateDocumentConfigOptions {
|
|
|
|
enabled_urls: vec![],
|
|
|
|
document_preload_limit: 1_000,
|
|
|
|
maybe_import_map: Some(Arc::new(import_map)),
|
|
|
|
maybe_config_file: None,
|
|
|
|
maybe_package_json: None,
|
|
|
|
npm_registry_api: npm_registry_api.clone(),
|
|
|
|
npm_resolution: npm_resolution.clone(),
|
|
|
|
});
|
2023-01-28 10:18:32 -05:00
|
|
|
|
|
|
|
// open the document
|
|
|
|
let document = documents.open(
|
|
|
|
file1_specifier.clone(),
|
|
|
|
1,
|
|
|
|
LanguageId::TypeScript,
|
|
|
|
"import {} from 'test';".into(),
|
|
|
|
);
|
|
|
|
|
|
|
|
assert_eq!(
|
|
|
|
document
|
|
|
|
.dependencies()
|
|
|
|
.get("test")
|
|
|
|
.unwrap()
|
|
|
|
.maybe_code
|
|
|
|
.maybe_specifier()
|
|
|
|
.map(ToOwned::to_owned),
|
|
|
|
Some(file2_specifier),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
// now point at file 3
|
|
|
|
{
|
|
|
|
let mut import_map = ImportMap::new(
|
|
|
|
ModuleSpecifier::from_file_path(documents_path.join("import_map.json"))
|
|
|
|
.unwrap(),
|
|
|
|
);
|
|
|
|
import_map
|
|
|
|
.imports_mut()
|
|
|
|
.append("test".to_string(), "./file3.ts".to_string())
|
|
|
|
.unwrap();
|
|
|
|
|
2023-05-11 17:17:14 -04:00
|
|
|
documents.update_config(UpdateDocumentConfigOptions {
|
|
|
|
enabled_urls: vec![],
|
|
|
|
document_preload_limit: 1_000,
|
|
|
|
maybe_import_map: Some(Arc::new(import_map)),
|
|
|
|
maybe_config_file: None,
|
|
|
|
maybe_package_json: None,
|
2023-02-22 14:15:25 -05:00
|
|
|
npm_registry_api,
|
|
|
|
npm_resolution,
|
2023-05-11 17:17:14 -04:00
|
|
|
});
|
2023-01-28 10:18:32 -05:00
|
|
|
|
|
|
|
// check the document's dependencies
|
|
|
|
let document = documents.get(&file1_specifier).unwrap();
|
|
|
|
assert_eq!(
|
|
|
|
document
|
|
|
|
.dependencies()
|
|
|
|
.get("test")
|
|
|
|
.unwrap()
|
|
|
|
.maybe_code
|
|
|
|
.maybe_specifier()
|
|
|
|
.map(ToOwned::to_owned),
|
|
|
|
Some(file3_specifier),
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
2023-03-30 17:47:53 -04:00
|
|
|
|
|
|
|
#[test]
|
|
|
|
pub fn test_pre_load_document_finder() {
|
|
|
|
let temp_dir = TempDir::new();
|
|
|
|
temp_dir.create_dir_all("root1/node_modules/");
|
|
|
|
temp_dir.write("root1/node_modules/mod.ts", ""); // no, node_modules
|
|
|
|
|
|
|
|
temp_dir.create_dir_all("root1/sub_dir");
|
2023-04-01 15:10:30 -04:00
|
|
|
temp_dir.create_dir_all("root1/target");
|
|
|
|
temp_dir.create_dir_all("root1/node_modules");
|
2023-03-30 17:47:53 -04:00
|
|
|
temp_dir.create_dir_all("root1/.git");
|
|
|
|
temp_dir.create_dir_all("root1/file.ts"); // no, directory
|
|
|
|
temp_dir.write("root1/mod1.ts", ""); // yes
|
|
|
|
temp_dir.write("root1/mod2.js", ""); // yes
|
|
|
|
temp_dir.write("root1/mod3.tsx", ""); // yes
|
|
|
|
temp_dir.write("root1/mod4.d.ts", ""); // yes
|
|
|
|
temp_dir.write("root1/mod5.jsx", ""); // yes
|
|
|
|
temp_dir.write("root1/mod6.mjs", ""); // yes
|
|
|
|
temp_dir.write("root1/mod7.mts", ""); // yes
|
|
|
|
temp_dir.write("root1/mod8.d.mts", ""); // yes
|
|
|
|
temp_dir.write("root1/other.json", ""); // no, json
|
|
|
|
temp_dir.write("root1/other.txt", ""); // no, text file
|
|
|
|
temp_dir.write("root1/other.wasm", ""); // no, don't load wasm
|
2023-04-01 15:10:30 -04:00
|
|
|
temp_dir.write("root1/Cargo.toml", ""); // no
|
2023-03-30 17:47:53 -04:00
|
|
|
temp_dir.write("root1/sub_dir/mod.ts", ""); // yes
|
|
|
|
temp_dir.write("root1/sub_dir/data.min.ts", ""); // no, minified file
|
|
|
|
temp_dir.write("root1/.git/main.ts", ""); // no, .git folder
|
2023-04-01 15:10:30 -04:00
|
|
|
temp_dir.write("root1/node_modules/main.ts", ""); // no, because it's in a node_modules folder
|
|
|
|
temp_dir.write("root1/target/main.ts", ""); // no, because there is a Cargo.toml in the root directory
|
2023-03-30 17:47:53 -04:00
|
|
|
|
|
|
|
temp_dir.create_dir_all("root2/folder");
|
|
|
|
temp_dir.write("root2/file1.ts", ""); // yes, provided
|
|
|
|
temp_dir.write("root2/file2.ts", ""); // no, not provided
|
2023-04-01 15:10:30 -04:00
|
|
|
temp_dir.write("root2/main.min.ts", ""); // yes, provided
|
2023-03-30 17:47:53 -04:00
|
|
|
temp_dir.write("root2/folder/main.ts", ""); // yes, provided
|
|
|
|
|
|
|
|
temp_dir.create_dir_all("root3/");
|
|
|
|
temp_dir.write("root3/mod.ts", ""); // no, not provided
|
|
|
|
|
2023-05-11 17:17:14 -04:00
|
|
|
let mut urls = PreloadDocumentFinder::from_enabled_urls_with_limit(
|
|
|
|
&vec![
|
|
|
|
temp_dir.uri().join("root1/").unwrap(),
|
|
|
|
temp_dir.uri().join("root2/file1.ts").unwrap(),
|
|
|
|
temp_dir.uri().join("root2/main.min.ts").unwrap(),
|
|
|
|
temp_dir.uri().join("root2/folder/").unwrap(),
|
|
|
|
],
|
|
|
|
1_000,
|
|
|
|
)
|
2023-03-30 17:47:53 -04:00
|
|
|
.collect::<Vec<_>>();
|
|
|
|
|
2023-04-01 15:10:30 -04:00
|
|
|
// Ideally we would test for order here, which should be BFS, but
|
|
|
|
// different file systems have different directory iteration order,
|
|
|
|
// so we sort the results
|
2023-03-30 17:47:53 -04:00
|
|
|
urls.sort();
|
|
|
|
|
|
|
|
assert_eq!(
|
|
|
|
urls,
|
|
|
|
vec![
|
|
|
|
temp_dir.uri().join("root1/mod1.ts").unwrap(),
|
|
|
|
temp_dir.uri().join("root1/mod2.js").unwrap(),
|
|
|
|
temp_dir.uri().join("root1/mod3.tsx").unwrap(),
|
|
|
|
temp_dir.uri().join("root1/mod4.d.ts").unwrap(),
|
|
|
|
temp_dir.uri().join("root1/mod5.jsx").unwrap(),
|
|
|
|
temp_dir.uri().join("root1/mod6.mjs").unwrap(),
|
|
|
|
temp_dir.uri().join("root1/mod7.mts").unwrap(),
|
|
|
|
temp_dir.uri().join("root1/mod8.d.mts").unwrap(),
|
|
|
|
temp_dir.uri().join("root1/sub_dir/mod.ts").unwrap(),
|
|
|
|
temp_dir.uri().join("root2/file1.ts").unwrap(),
|
|
|
|
temp_dir.uri().join("root2/folder/main.ts").unwrap(),
|
2023-04-01 15:10:30 -04:00
|
|
|
temp_dir.uri().join("root2/main.min.ts").unwrap(),
|
2023-03-30 17:47:53 -04:00
|
|
|
]
|
|
|
|
);
|
2023-04-01 15:10:30 -04:00
|
|
|
|
|
|
|
// now try iterating with a low limit
|
|
|
|
let urls = PreloadDocumentFinder::from_enabled_urls_with_limit(
|
|
|
|
&vec![temp_dir.uri()],
|
|
|
|
10, // entries and not results
|
|
|
|
)
|
|
|
|
.collect::<Vec<_>>();
|
|
|
|
|
|
|
|
// since different file systems have different iteration
|
|
|
|
// order, the number here may vary, so just assert it's below
|
|
|
|
// a certain amount
|
|
|
|
assert!(urls.len() < 5, "Actual length: {}", urls.len());
|
2023-03-30 17:47:53 -04:00
|
|
|
}
|
2023-04-01 12:02:44 -04:00
|
|
|
|
|
|
|
#[test]
|
|
|
|
pub fn test_pre_load_document_finder_disallowed_dirs() {
|
|
|
|
if cfg!(windows) {
|
2023-05-11 17:17:14 -04:00
|
|
|
let paths = PreloadDocumentFinder::from_enabled_urls_with_limit(
|
|
|
|
&vec![Url::parse("file:///c:/").unwrap()],
|
|
|
|
1_000,
|
2023-04-01 15:10:30 -04:00
|
|
|
)
|
|
|
|
.collect::<Vec<_>>();
|
2023-04-01 12:02:44 -04:00
|
|
|
assert_eq!(paths, vec![]);
|
|
|
|
} else {
|
2023-05-11 17:17:14 -04:00
|
|
|
let paths = PreloadDocumentFinder::from_enabled_urls_with_limit(
|
|
|
|
&vec![Url::parse("file:///").unwrap()],
|
|
|
|
1_000,
|
|
|
|
)
|
|
|
|
.collect::<Vec<_>>();
|
2023-04-01 12:02:44 -04:00
|
|
|
assert_eq!(paths, vec![]);
|
|
|
|
}
|
|
|
|
}
|
2023-04-01 15:10:30 -04:00
|
|
|
|
|
|
|
#[test]
|
|
|
|
fn test_sort_and_remove_non_leaf_dirs() {
|
|
|
|
fn run_test(paths: Vec<&str>, expected_output: Vec<&str>) {
|
|
|
|
let paths = sort_and_remove_non_leaf_dirs(
|
|
|
|
paths.into_iter().map(PathBuf::from).collect(),
|
|
|
|
);
|
|
|
|
let dirs: Vec<_> =
|
|
|
|
paths.iter().map(|dir| dir.to_string_lossy()).collect();
|
|
|
|
assert_eq!(dirs, expected_output);
|
|
|
|
}
|
|
|
|
|
|
|
|
run_test(
|
|
|
|
vec![
|
|
|
|
"/test/asdf/test/asdf/",
|
|
|
|
"/test/asdf/test/asdf/test.ts",
|
|
|
|
"/test/asdf/",
|
|
|
|
"/test/asdf/",
|
|
|
|
"/testing/456/893/",
|
|
|
|
"/testing/456/893/test/",
|
|
|
|
],
|
|
|
|
vec!["/test/asdf/", "/testing/456/893/"],
|
|
|
|
);
|
|
|
|
run_test(vec![], vec![]);
|
|
|
|
}
|
2021-01-22 05:03:16 -05:00
|
|
|
}
|