mirror of
https://github.com/denoland/deno.git
synced 2024-11-21 15:04:11 -05:00
Merge branch 'main' into DENO_ROOT_INSTALL_Fix
This commit is contained in commit 6c7fc66de7.

20 changed files with 630 additions and 144 deletions

@@ -164,8 +164,19 @@ fn get_validated_scheme(
 ) -> Result<String, AnyError> {
   let scheme = specifier.scheme();
   if !SUPPORTED_SCHEMES.contains(&scheme) {
+    // NOTE(bartlomieju): this message lists the additional `npm` and `jsr` schemes, but they should actually be handled
+    // before `file_fetcher.rs` APIs are even hit.
+    let mut all_supported_schemes = SUPPORTED_SCHEMES.to_vec();
+    all_supported_schemes.extend_from_slice(&["npm", "jsr"]);
+    all_supported_schemes.sort();
+    let scheme_list = all_supported_schemes
+      .iter()
+      .map(|scheme| format!(" - \"{}\"", scheme))
+      .collect::<Vec<_>>()
+      .join("\n");
     Err(generic_error(format!(
-      "Unsupported scheme \"{scheme}\" for module \"{specifier}\". Supported schemes: {SUPPORTED_SCHEMES:#?}"
+      "Unsupported scheme \"{scheme}\" for module \"{specifier}\". Supported schemes:\n{}",
+      scheme_list
     )))
   } else {
     Ok(scheme.to_string())
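
For illustration, the user-visible effect as a sketch (the `np:cowsay` typo is the one exercised by the new `typo_in_npm` spec test near the end of this diff):

    // main.ts -- `deno run main.ts` now fails with a sorted, one-per-line
    // scheme list instead of the old debug-printed array:
    //
    //   error: Unsupported scheme "np" for module "np:cowsay". Supported schemes:
    //    - "blob"
    //    - "data"
    //    - "file"
    //    - "http"
    //    - "https"
    //    - "jsr"
    //    - "npm"
    import "np:cowsay";
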
@@ -5,6 +5,7 @@ use super::cache::LspCache;
 use super::config::Config;
 use super::resolver::LspIsCjsResolver;
 use super::resolver::LspResolver;
+use super::resolver::ScopeDepInfo;
 use super::resolver::SingleReferrerGraphResolver;
 use super::testing::TestCollector;
 use super::testing::TestModule;

@@ -38,7 +39,6 @@ use indexmap::IndexSet;
-use node_resolver::NodeModuleKind;
 use std::borrow::Cow;
 use std::collections::BTreeMap;
 use std::collections::BTreeSet;
 use std::collections::HashMap;
 use std::collections::HashSet;
 use std::fs;

@@ -989,12 +989,7 @@ pub struct Documents {
   open_docs: HashMap<ModuleSpecifier, Arc<Document>>,
   /// Documents stored on the file system.
   file_system_docs: Arc<FileSystemDocuments>,
-  /// The npm package requirements found in npm specifiers.
-  npm_reqs_by_scope:
-    Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>>,
-  /// Config scopes that contain a node: specifier such that a @types/node
-  /// package should be injected.
-  scopes_with_node_specifier: Arc<HashSet<Option<ModuleSpecifier>>>,
+  dep_info_by_scope: Arc<BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>>,
 }

 impl Documents {

@@ -1157,17 +1152,20 @@ impl Documents {
     false
   }

-  pub fn npm_reqs_by_scope(
+  pub fn dep_info_by_scope(
     &mut self,
-  ) -> Arc<BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>> {
-    self.calculate_npm_reqs_if_dirty();
-    self.npm_reqs_by_scope.clone()
+  ) -> Arc<BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>> {
+    self.calculate_dep_info_if_dirty();
+    self.dep_info_by_scope.clone()
   }

-  pub fn scopes_with_node_specifier(
-    &self,
-  ) -> &Arc<HashSet<Option<ModuleSpecifier>>> {
-    &self.scopes_with_node_specifier
+  pub fn scopes_with_node_specifier(&self) -> HashSet<Option<ModuleSpecifier>> {
+    self
+      .dep_info_by_scope
+      .iter()
+      .filter(|(_, i)| i.has_node_specifier)
+      .map(|(s, _)| s.clone())
+      .collect::<HashSet<_>>()
   }

   /// Return a document for the specifier.

@@ -1410,34 +1408,46 @@ impl Documents {
   /// Iterate through the documents, building a map where the key is a unique
   /// document and the value is a set of specifiers that depend on that
   /// document.
-  fn calculate_npm_reqs_if_dirty(&mut self) {
-    let mut npm_reqs_by_scope: BTreeMap<_, BTreeSet<_>> = Default::default();
-    let mut scopes_with_specifier = HashSet::new();
+  fn calculate_dep_info_if_dirty(&mut self) {
+    let mut dep_info_by_scope: BTreeMap<_, ScopeDepInfo> = Default::default();
     let is_fs_docs_dirty = self.file_system_docs.set_dirty(false);
     if !is_fs_docs_dirty && !self.dirty {
       return;
     }
     let mut visit_doc = |doc: &Arc<Document>| {
       let scope = doc.scope();
-      let reqs = npm_reqs_by_scope.entry(scope.cloned()).or_default();
+      let dep_info = dep_info_by_scope.entry(scope.cloned()).or_default();
       for dependency in doc.dependencies().values() {
-        if let Some(dep) = dependency.get_code() {
+        let code_specifier = dependency.get_code();
+        let type_specifier = dependency.get_type();
+        if let Some(dep) = code_specifier {
           if dep.scheme() == "node" {
-            scopes_with_specifier.insert(scope.cloned());
+            dep_info.has_node_specifier = true;
           }
           if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
-            reqs.insert(reference.into_inner().req);
+            dep_info.npm_reqs.insert(reference.into_inner().req);
           }
         }
-        if let Some(dep) = dependency.get_type() {
+        if let Some(dep) = type_specifier {
           if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
-            reqs.insert(reference.into_inner().req);
+            dep_info.npm_reqs.insert(reference.into_inner().req);
           }
         }
+        if dependency.maybe_deno_types_specifier.is_some() {
+          if let (Some(code_specifier), Some(type_specifier)) =
+            (code_specifier, type_specifier)
+          {
+            if MediaType::from_specifier(type_specifier).is_declaration() {
+              dep_info
+                .deno_types_to_code_resolutions
+                .insert(type_specifier.clone(), code_specifier.clone());
+            }
+          }
+        }
       }
       if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() {
         if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) {
-          reqs.insert(reference.into_inner().req);
+          dep_info.npm_reqs.insert(reference.into_inner().req);
         }
       }
     };

@@ -1448,14 +1458,49 @@ impl Documents {
       visit_doc(doc);
     }

-    // fill the reqs from the lockfile
     for (scope, config_data) in self.config.tree.data_by_scope().as_ref() {
+      let dep_info = dep_info_by_scope.entry(Some(scope.clone())).or_default();
+      (|| {
+        let config_file = config_data.maybe_deno_json()?;
+        let jsx_config =
+          config_file.to_maybe_jsx_import_source_config().ok()??;
+        let type_specifier = jsx_config.default_types_specifier.as_ref()?;
+        let code_specifier = jsx_config.default_specifier.as_ref()?;
+        let cli_resolver = self.resolver.as_cli_resolver(Some(scope));
+        let range = deno_graph::Range {
+          specifier: jsx_config.base_url.clone(),
+          start: deno_graph::Position::zeroed(),
+          end: deno_graph::Position::zeroed(),
+        };
+        let type_specifier = cli_resolver
+          .resolve(
+            type_specifier,
+            &range,
+            // todo(dsherret): this is wrong because it doesn't consider CJS referrers
+            deno_package_json::NodeModuleKind::Esm,
+            ResolutionMode::Types,
+          )
+          .ok()?;
+        let code_specifier = cli_resolver
+          .resolve(
+            code_specifier,
+            &range,
+            // todo(dsherret): this is wrong because it doesn't consider CJS referrers
+            deno_package_json::NodeModuleKind::Esm,
+            ResolutionMode::Execution,
+          )
+          .ok()?;
+        dep_info
+          .deno_types_to_code_resolutions
+          .insert(type_specifier, code_specifier);
+        Some(())
+      })();
+      // fill the reqs from the lockfile
       if let Some(lockfile) = config_data.lockfile.as_ref() {
-        let reqs = npm_reqs_by_scope.entry(Some(scope.clone())).or_default();
         let lockfile = lockfile.lock();
         for dep_req in lockfile.content.packages.specifiers.keys() {
           if dep_req.kind == deno_semver::package::PackageKind::Npm {
-            reqs.insert(dep_req.req.clone());
+            dep_info.npm_reqs.insert(dep_req.req.clone());
           }
         }
       }

@@ -1464,15 +1509,22 @@ impl Documents {
     // Ensure a @types/node package exists when any module uses a node: specifier.
     // Unlike on the command line, here we just add @types/node to the npm package
     // requirements since this won't end up in the lockfile.
-    for scope in &scopes_with_specifier {
-      let reqs = npm_reqs_by_scope.entry(scope.clone()).or_default();
-      if !reqs.iter().any(|r| r.name == "@types/node") {
-        reqs.insert(PackageReq::from_str("@types/node").unwrap());
+    for dep_info in dep_info_by_scope.values_mut() {
+      if dep_info.has_node_specifier
+        && !dep_info.npm_reqs.iter().any(|r| r.name == "@types/node")
+      {
+        dep_info
+          .npm_reqs
+          .insert(PackageReq::from_str("@types/node").unwrap());
      }
    }

-    self.npm_reqs_by_scope = Arc::new(npm_reqs_by_scope);
-    self.scopes_with_node_specifier = Arc::new(scopes_with_specifier);
+    self.dep_info_by_scope = Arc::new(
+      dep_info_by_scope
+        .into_iter()
+        .map(|(s, i)| (s, Arc::new(i)))
+        .collect(),
+    );
     self.dirty = false;
   }

@@ -1036,7 +1036,7 @@ impl Inner {

       // refresh the npm specifiers because it might have discovered
       // a @types/node package and now's a good time to do that anyway
-      self.refresh_npm_specifiers().await;
+      self.refresh_dep_info().await;

       self.project_changed([], true);
     }

@@ -1082,7 +1082,7 @@ impl Inner {
     );
     if document.is_diagnosable() {
       self.project_changed([(document.specifier(), ChangeKind::Opened)], false);
-      self.refresh_npm_specifiers().await;
+      self.refresh_dep_info().await;
       self.diagnostics_server.invalidate(&[specifier]);
       self.send_diagnostics_update();
       self.send_testing_update();

@@ -1103,8 +1103,8 @@ impl Inner {
       Ok(document) => {
         if document.is_diagnosable() {
           let old_scopes_with_node_specifier =
-            self.documents.scopes_with_node_specifier().clone();
-          self.refresh_npm_specifiers().await;
+            self.documents.scopes_with_node_specifier();
+          self.refresh_dep_info().await;
           let mut config_changed = false;
           if !self
             .documents

@@ -1155,13 +1155,15 @@ impl Inner {
     }));
   }

-  async fn refresh_npm_specifiers(&mut self) {
-    let package_reqs = self.documents.npm_reqs_by_scope();
+  async fn refresh_dep_info(&mut self) {
+    let dep_info_by_scope = self.documents.dep_info_by_scope();
     let resolver = self.resolver.clone();
     // spawn due to the lsp's `Send` requirement
-    spawn(async move { resolver.set_npm_reqs(&package_reqs).await })
-      .await
-      .ok();
+    spawn(
+      async move { resolver.set_dep_info_by_scope(&dep_info_by_scope).await },
+    )
+    .await
+    .ok();
   }

   async fn did_close(&mut self, params: DidCloseTextDocumentParams) {

@@ -1180,7 +1182,7 @@ impl Inner {
       .uri_to_specifier(&params.text_document.uri, LspUrlKind::File);
     self.diagnostics_state.clear(&specifier);
     if self.is_diagnosable(&specifier) {
-      self.refresh_npm_specifiers().await;
+      self.refresh_dep_info().await;
       self.diagnostics_server.invalidate(&[specifier.clone()]);
       self.send_diagnostics_update();
       self.send_testing_update();

@@ -3600,15 +3602,16 @@ impl Inner {

     if byonm {
       roots.retain(|s| s.scheme() != "npm");
-    } else if let Some(npm_reqs) = self
+    } else if let Some(dep_info) = self
       .documents
-      .npm_reqs_by_scope()
+      .dep_info_by_scope()
       .get(&config_data.map(|d| d.scope.as_ref().clone()))
     {
       // always include the npm packages since resolution of one npm package
       // might affect the resolution of other npm packages
       roots.extend(
-        npm_reqs
+        dep_info
+          .npm_reqs
           .iter()
           .map(|req| ModuleSpecifier::parse(&format!("npm:{}", req)).unwrap()),
       );

@@ -3686,7 +3689,7 @@ impl Inner {

   async fn post_cache(&mut self) {
     self.resolver.did_cache();
-    self.refresh_npm_specifiers().await;
+    self.refresh_dep_info().await;
     self.diagnostics_server.invalidate_all();
     self.project_changed([], true);
     self.ts_server.cleanup_semantic_cache(self.snapshot()).await;

@@ -7,6 +7,7 @@ use deno_cache_dir::HttpCache;
 use deno_config::deno_json::JsxImportSourceConfig;
 use deno_config::workspace::PackageJsonDepResolution;
 use deno_config::workspace::WorkspaceResolver;
+use deno_core::parking_lot::Mutex;
 use deno_core::url::Url;
 use deno_graph::source::ResolutionMode;
 use deno_graph::GraphImport;

@@ -84,6 +85,7 @@ struct LspScopeResolver {
   pkg_json_resolver: Arc<PackageJsonResolver>,
   redirect_resolver: Option<Arc<RedirectResolver>>,
   graph_imports: Arc<IndexMap<ModuleSpecifier, GraphImport>>,
+  dep_info: Arc<Mutex<Arc<ScopeDepInfo>>>,
   package_json_deps_by_resolution: Arc<IndexMap<ModuleSpecifier, String>>,
   config_data: Option<Arc<ConfigData>>,
 }

@@ -101,6 +103,7 @@ impl Default for LspScopeResolver {
       pkg_json_resolver: factory.pkg_json_resolver().clone(),
       redirect_resolver: None,
       graph_imports: Default::default(),
+      dep_info: Default::default(),
       package_json_deps_by_resolution: Default::default(),
       config_data: None,
     }

@@ -180,6 +183,15 @@ impl LspScopeResolver {
             NodeModuleKind::Esm,
             NodeResolutionMode::Types,
           )
+          .or_else(|_| {
+            npm_pkg_req_resolver.resolve_req_reference(
+              &req_ref,
+              &referrer,
+              // todo(dsherret): this is wrong because it doesn't consider CJS referrers
+              NodeModuleKind::Esm,
+              NodeResolutionMode::Execution,
+            )
+          })
           .ok()?,
         ))
         .0;

@@ -200,6 +212,7 @@ impl LspScopeResolver {
       pkg_json_resolver,
       redirect_resolver,
       graph_imports,
+      dep_info: Default::default(),
       package_json_deps_by_resolution,
       config_data: config_data.cloned(),
     }

@@ -222,6 +235,7 @@ impl LspScopeResolver {
       redirect_resolver: self.redirect_resolver.clone(),
       pkg_json_resolver: factory.pkg_json_resolver().clone(),
       graph_imports: self.graph_imports.clone(),
+      dep_info: self.dep_info.clone(),
       package_json_deps_by_resolution: self
         .package_json_deps_by_resolution
         .clone(),

@@ -288,19 +302,24 @@ impl LspResolver {
     }
   }

-  pub async fn set_npm_reqs(
+  pub async fn set_dep_info_by_scope(
     &self,
-    reqs: &BTreeMap<Option<ModuleSpecifier>, BTreeSet<PackageReq>>,
+    dep_info_by_scope: &Arc<
+      BTreeMap<Option<ModuleSpecifier>, Arc<ScopeDepInfo>>,
+    >,
   ) {
     for (scope, resolver) in [(None, &self.unscoped)]
       .into_iter()
       .chain(self.by_scope.iter().map(|(s, r)| (Some(s), r)))
     {
+      let dep_info = dep_info_by_scope.get(&scope.cloned());
+      if let Some(dep_info) = dep_info {
+        *resolver.dep_info.lock() = dep_info.clone();
+      }
       if let Some(npm_resolver) = resolver.npm_resolver.as_ref() {
         if let Some(npm_resolver) = npm_resolver.as_managed() {
-          let reqs = reqs
-            .get(&scope.cloned())
-            .map(|reqs| reqs.iter().cloned().collect::<Vec<_>>())
+          let reqs = dep_info
+            .map(|i| i.npm_reqs.iter().cloned().collect::<Vec<_>>())
             .unwrap_or_default();
           if let Err(err) = npm_resolver.set_package_reqs(&reqs).await {
             lsp_warn!("Could not set npm package requirements: {:#}", err);

@@ -434,6 +453,19 @@ impl LspResolver {
       .cloned()
   }

+  pub fn deno_types_to_code_resolution(
+    &self,
+    specifier: &ModuleSpecifier,
+    file_referrer: Option<&ModuleSpecifier>,
+  ) -> Option<ModuleSpecifier> {
+    let resolver = self.get_scope_resolver(file_referrer);
+    let dep_info = resolver.dep_info.lock().clone();
+    dep_info
+      .deno_types_to_code_resolutions
+      .get(specifier)
+      .cloned()
+  }
+
   pub fn in_node_modules(&self, specifier: &ModuleSpecifier) -> bool {
     fn has_node_modules_dir(specifier: &ModuleSpecifier) -> bool {
       // consider any /node_modules/ directory as being in the node_modules

@@ -538,6 +570,13 @@ impl LspResolver {
   }
 }

+#[derive(Debug, Default, Clone)]
+pub struct ScopeDepInfo {
+  pub deno_types_to_code_resolutions: HashMap<ModuleSpecifier, ModuleSpecifier>,
+  pub npm_reqs: BTreeSet<PackageReq>,
+  pub has_node_specifier: bool,
+}
+
 #[derive(Default)]
 struct ResolverFactoryServices {
   cli_resolver: Deferred<Arc<CliResolver>>,
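
For orientation, a minimal sketch of the user-level pattern that `deno_types_to_code_resolutions` captures; the directive and package names below are the ones exercised by the `lsp_npm_auto_import_with_deno_types` test later in this diff:

    // The LSP records resolved types specifier -> resolved code specifier
    // for declaration-file directives like this one:

    // @deno-types="@types/lz-string"
    import { compressToBase64 } from "lz-string";

    console.log(compressToBase64("hello"));
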
@@ -3417,9 +3417,18 @@ fn parse_code_actions(
       additional_text_edits.extend(change.text_changes.iter().map(|tc| {
         let mut text_edit = tc.as_text_edit(asset_or_doc.line_index());
         if let Some(specifier_rewrite) = &data.specifier_rewrite {
-          text_edit.new_text = text_edit
-            .new_text
-            .replace(&specifier_rewrite.0, &specifier_rewrite.1);
+          text_edit.new_text = text_edit.new_text.replace(
+            &specifier_rewrite.old_specifier,
+            &specifier_rewrite.new_specifier,
+          );
+          if let Some(deno_types_specifier) =
+            &specifier_rewrite.new_deno_types_specifier
+          {
+            text_edit.new_text = format!(
+              "// @deno-types=\"{}\"\n{}",
+              deno_types_specifier, &text_edit.new_text
+            );
+          }
         }
         text_edit
       }));

@@ -3578,17 +3587,23 @@ impl CompletionEntryDetails {
     let mut text_edit = original_item.text_edit.clone();
     if let Some(specifier_rewrite) = &data.specifier_rewrite {
       if let Some(text_edit) = &mut text_edit {
-        match text_edit {
-          lsp::CompletionTextEdit::Edit(text_edit) => {
-            text_edit.new_text = text_edit
-              .new_text
-              .replace(&specifier_rewrite.0, &specifier_rewrite.1);
-          }
+        let new_text = match text_edit {
+          lsp::CompletionTextEdit::Edit(text_edit) => &mut text_edit.new_text,
           lsp::CompletionTextEdit::InsertAndReplace(insert_replace_edit) => {
-            insert_replace_edit.new_text = insert_replace_edit
-              .new_text
-              .replace(&specifier_rewrite.0, &specifier_rewrite.1);
+            &mut insert_replace_edit.new_text
           }
-        }
+        };
+        *new_text = new_text.replace(
+          &specifier_rewrite.old_specifier,
+          &specifier_rewrite.new_specifier,
+        );
+        if let Some(deno_types_specifier) =
+          &specifier_rewrite.new_deno_types_specifier
+        {
+          *new_text = format!(
+            "// @deno-types=\"{}\"\n{}",
+            deno_types_specifier, new_text
+          );
+        }
       }
     }

@@ -3693,6 +3708,13 @@ impl CompletionInfo {
   }
 }

+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct CompletionSpecifierRewrite {
+  old_specifier: String,
+  new_specifier: String,
+  new_deno_types_specifier: Option<String>,
+}
+
 #[derive(Debug, Deserialize, Serialize)]
 #[serde(rename_all = "camelCase")]
 pub struct CompletionItemData {

@@ -3705,7 +3727,7 @@ pub struct CompletionItemData {
   /// be rewritten by replacing the first string with the second. Intended for
   /// auto-import specifiers to be reverse-import-mapped.
   #[serde(skip_serializing_if = "Option::is_none")]
-  pub specifier_rewrite: Option<(String, String)>,
+  pub specifier_rewrite: Option<CompletionSpecifierRewrite>,
   #[serde(skip_serializing_if = "Option::is_none")]
   pub data: Option<Value>,
   pub use_code_snippet: bool,

@@ -3927,20 +3949,40 @@ impl CompletionEntry {
     if let Some(source) = &self.source {
       let mut display_source = source.clone();
       if let Some(import_data) = &self.auto_import_data {
-        if let Some(new_module_specifier) = language_server
-          .get_ts_response_import_mapper(specifier)
+        let import_mapper =
+          language_server.get_ts_response_import_mapper(specifier);
+        if let Some(mut new_specifier) = import_mapper
           .check_specifier(&import_data.normalized, specifier)
           .or_else(|| relative_specifier(specifier, &import_data.normalized))
         {
-          if new_module_specifier.contains("/node_modules/") {
+          if new_specifier.contains("/node_modules/") {
             return None;
           }
-          display_source.clone_from(&new_module_specifier);
-          if new_module_specifier != import_data.raw.module_specifier {
-            specifier_rewrite = Some((
-              import_data.raw.module_specifier.clone(),
-              new_module_specifier,
-            ));
+          let mut new_deno_types_specifier = None;
+          if let Some(code_specifier) = language_server
+            .resolver
+            .deno_types_to_code_resolution(
+              &import_data.normalized,
+              Some(specifier),
+            )
+            .and_then(|s| {
+              import_mapper
+                .check_specifier(&s, specifier)
+                .or_else(|| relative_specifier(specifier, &s))
+            })
+          {
+            new_deno_types_specifier =
+              Some(std::mem::replace(&mut new_specifier, code_specifier));
+          }
+          display_source.clone_from(&new_specifier);
+          if new_specifier != import_data.raw.module_specifier
+            || new_deno_types_specifier.is_some()
+          {
+            specifier_rewrite = Some(CompletionSpecifierRewrite {
+              old_specifier: import_data.raw.module_specifier.clone(),
+              new_specifier,
+              new_deno_types_specifier,
+            });
+          }
         } else if source.starts_with(jsr_url().as_str()) {
           return None;

@@ -4246,9 +4288,7 @@ impl TscSpecifierMap {
       return specifier.to_string();
     }
     let mut specifier = original.to_string();
-    if specifier.contains("/node_modules/.deno/")
-      && !specifier.contains("/node_modules/@types/node/")
-    {
+    if !specifier.contains("/node_modules/@types/node/") {
       // The ts server doesn't give completions from files in
       // `node_modules/.deno/`. We work around it like this.
       specifier = specifier.replace("/node_modules/", "/$node_modules/");
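
A sketch of that specifier masking (the path is hypothetical; Rust's `String::replace` swaps every occurrence, so `replaceAll` is the TypeScript analogue):

    const original =
      "file:///p/node_modules/.deno/foo@1.0.0/node_modules/foo/mod.ts";
    const masked = original.replaceAll("/node_modules/", "/$node_modules/");
    // => "file:///p/$node_modules/.deno/foo@1.0.0/$node_modules/foo/mod.ts"
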
@@ -4415,6 +4455,8 @@ fn op_load<'s>(
     })
   };

+  lsp_warn!("op_load {} {}", &specifier, maybe_load_response.is_some());
+
   let serialized = serde_v8::to_v8(scope, maybe_load_response)?;

   state.performance.measure(mark);

@@ -15,6 +15,7 @@ import { core, primordials } from "ext:core/mod.js";
 const {
   isAnyArrayBuffer,
   isArrayBuffer,
+  isStringObject,
 } = core;
 const {
   ArrayBufferIsView,

@@ -466,6 +467,8 @@ function extractBody(object) {
     if (object.locked || isReadableStreamDisturbed(object)) {
       throw new TypeError("ReadableStream is locked or disturbed");
     }
+  } else if (object[webidl.AsyncIterable] === webidl.AsyncIterable) {
+    stream = ReadableStream.from(object.open());
   }
   if (typeof source === "string") {
     // WARNING: this deviates from spec (expects length to be set)

@@ -483,6 +486,9 @@ function extractBody(object) {
   return { body, contentType };
 }

+webidl.converters["async iterable<Uint8Array>"] = webidl
+  .createAsyncIterableConverter(webidl.converters.Uint8Array);
+
 webidl.converters["BodyInit_DOMString"] = (V, prefix, context, opts) => {
   // Union for (ReadableStream or Blob or ArrayBufferView or ArrayBuffer or FormData or URLSearchParams or USVString)
   if (ObjectPrototypeIsPrototypeOf(ReadableStreamPrototype, V)) {

@@ -501,6 +507,14 @@ webidl.converters["BodyInit_DOMString"] = (V, prefix, context, opts) => {
   if (ArrayBufferIsView(V)) {
     return webidl.converters["ArrayBufferView"](V, prefix, context, opts);
   }
+  if (webidl.isAsyncIterable(V) && !isStringObject(V)) {
+    return webidl.converters["async iterable<Uint8Array>"](
+      V,
+      prefix,
+      context,
+      opts,
+    );
+  }
 }
 // BodyInit conversion is passed to extractBody(), which calls core.encode().
 // core.encode() will UTF-8 encode strings with replacement, being equivalent to the USV normalization.

ext/fetch/lib.deno_fetch.d.ts (vendored, 2 changes)

@@ -163,6 +163,8 @@ type BodyInit =
   | FormData
   | URLSearchParams
   | ReadableStream<Uint8Array>
+  | Iterable<Uint8Array>
+  | AsyncIterable<Uint8Array>
   | string;
 /** @category Fetch */
 type RequestDestination =
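
With the widened `BodyInit` union, sync and async iterables of `Uint8Array` become valid bodies; a minimal sketch mirroring the new `fetch async iterable` unit tests further down:

    const res = new Response((async function* () {
      yield new Uint8Array([104, 105]); // the bytes for "hi"
    })());
    console.log(await res.text()); // "hi"
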
@@ -70,7 +70,6 @@ const {
   String,
   Symbol,
   SymbolAsyncIterator,
-  SymbolIterator,
   SymbolFor,
   TypeError,
   TypedArrayPrototypeGetBuffer,

@@ -5084,34 +5083,6 @@ function initializeCountSizeFunction(globalObject) {
   WeakMapPrototypeSet(countSizeFunctionWeakMap, globalObject, size);
 }

-// Ref: https://tc39.es/ecma262/#sec-getiterator
-function getAsyncOrSyncIterator(obj) {
-  let iterator;
-  if (obj[SymbolAsyncIterator] != null) {
-    iterator = obj[SymbolAsyncIterator]();
-    if (!isObject(iterator)) {
-      throw new TypeError(
-        "[Symbol.asyncIterator] returned a non-object value",
-      );
-    }
-  } else if (obj[SymbolIterator] != null) {
-    iterator = obj[SymbolIterator]();
-    if (!isObject(iterator)) {
-      throw new TypeError("[Symbol.iterator] returned a non-object value");
-    }
-  } else {
-    throw new TypeError("No iterator found");
-  }
-  if (typeof iterator.next !== "function") {
-    throw new TypeError("iterator.next is not a function");
-  }
-  return iterator;
-}
-
-function isObject(x) {
-  return (typeof x === "object" && x != null) || typeof x === "function";
-}
-
 const _resourceBacking = Symbol("[[resourceBacking]]");
 // This distinction exists to prevent unrefable streams being used in
 // regular fast streams that are unaware of refability

@@ -5197,21 +5168,22 @@ class ReadableStream {
   }

   static from(asyncIterable) {
+    const prefix = "Failed to execute 'ReadableStream.from'";
     webidl.requiredArguments(
       arguments.length,
       1,
-      "Failed to execute 'ReadableStream.from'",
+      prefix,
     );
-    asyncIterable = webidl.converters.any(asyncIterable);
-
-    const iterator = getAsyncOrSyncIterator(asyncIterable);
+    asyncIterable = webidl.converters["async iterable<any>"](
+      asyncIterable,
+      prefix,
+      "Argument 1",
+    );
+    const iter = asyncIterable.open();

     const stream = createReadableStream(noop, async () => {
       // deno-lint-ignore prefer-primordials
-      const res = await iterator.next();
-      if (!isObject(res)) {
-        throw new TypeError("iterator.next value is not an object");
-      }
+      const res = await iter.next();
       if (res.done) {
         readableStreamDefaultControllerClose(stream[_controller]);
       } else {

@@ -5221,17 +5193,8 @@ class ReadableStream {
         );
       }
     }, async (reason) => {
-      if (iterator.return == null) {
-        return undefined;
-      } else {
-        // deno-lint-ignore prefer-primordials
-        const res = await iterator.return(reason);
-        if (!isObject(res)) {
-          throw new TypeError("iterator.return value is not an object");
-        } else {
-          return undefined;
-        }
-      }
+      // deno-lint-ignore prefer-primordials
+      await iter.return(reason);
     }, 0);
     return stream;
   }
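
A behavioral sketch of the rewritten `ReadableStream.from` (the string rejection is asserted by the new `readableStreamFromWithStringThrows` test below):

    // Sync and async iterables still work; each value becomes a chunk.
    const stream = ReadableStream.from([1, 2, 3]);
    for await (const chunk of stream) console.log(chunk); // 1 2 3

    // Plain strings are now rejected by the webidl converter:
    //   ReadableStream.from("string")
    // throws TypeError: Failed to execute 'ReadableStream.from':
    //   Argument 1 can not be converted to async iterable.
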
@@ -6892,6 +6855,10 @@ webidl.converters.StreamPipeOptions = webidl
     { key: "signal", converter: webidl.converters.AbortSignal },
   ]);

+webidl.converters["async iterable<any>"] = webidl.createAsyncIterableConverter(
+  webidl.converters.any,
+);
+
 internals.resourceForReadableStream = resourceForReadableStream;

 export {

@@ -26,6 +26,7 @@ const {
   Float32Array,
   Float64Array,
   FunctionPrototypeBind,
+  FunctionPrototypeCall,
   Int16Array,
   Int32Array,
   Int8Array,

@@ -77,6 +78,7 @@ const {
   StringPrototypeToWellFormed,
   Symbol,
   SymbolIterator,
+  SymbolAsyncIterator,
   SymbolToStringTag,
   TypedArrayPrototypeGetBuffer,
   TypedArrayPrototypeGetSymbolToStringTag,

@@ -920,6 +922,127 @@ function createSequenceConverter(converter) {
   };
 }

+function isAsyncIterable(obj) {
+  if (obj[SymbolAsyncIterator] === undefined) {
+    if (obj[SymbolIterator] === undefined) {
+      return false;
+    }
+  }
+
+  return true;
+}
+
+const AsyncIterable = Symbol("[[asyncIterable]]");
+
+function createAsyncIterableConverter(converter) {
+  return function (
+    V,
+    prefix = undefined,
+    context = undefined,
+    opts = { __proto__: null },
+  ) {
+    if (type(V) !== "Object") {
+      throw makeException(
+        TypeError,
+        "can not be converted to async iterable.",
+        prefix,
+        context,
+      );
+    }
+
+    let isAsync = true;
+    let method = V[SymbolAsyncIterator];
+    if (method === undefined) {
+      method = V[SymbolIterator];
+
+      if (method === undefined) {
+        throw makeException(
+          TypeError,
+          "is not iterable.",
+          prefix,
+          context,
+        );
+      }
+
+      isAsync = false;
+    }
+
+    return {
+      value: V,
+      [AsyncIterable]: AsyncIterable,
+      open(context) {
+        const iter = FunctionPrototypeCall(method, V);
+        if (type(iter) !== "Object") {
+          throw new TypeError(
+            `${context} could not be iterated because iterator method did not return object, but ${
+              type(iter)
+            }.`,
+          );
+        }
+
+        let asyncIterator = iter;
+
+        if (!isAsync) {
+          asyncIterator = {
+            // deno-lint-ignore require-await
+            async next() {
+              // deno-lint-ignore prefer-primordials
+              return iter.next();
+            },
+          };
+        }
+
+        return {
+          async next() {
+            // deno-lint-ignore prefer-primordials
+            const iterResult = await asyncIterator.next();
+            if (type(iterResult) !== "Object") {
+              throw TypeError(
+                `${context} failed to iterate next value because the next() method did not return an object, but ${
+                  type(iterResult)
+                }.`,
+              );
+            }
+
+            if (iterResult.done) {
+              return { done: true };
+            }
+
+            const iterValue = converter(
+              iterResult.value,
+              `${context} failed to iterate next value`,
+              `The value returned from the next() method`,
+              opts,
+            );
+
+            return { done: false, value: iterValue };
+          },
+          async return(reason) {
+            if (asyncIterator.return === undefined) {
+              return undefined;
+            }
+
+            // deno-lint-ignore prefer-primordials
+            const returnPromiseResult = await asyncIterator.return(reason);
+            if (type(returnPromiseResult) !== "Object") {
+              throw TypeError(
+                `${context} failed to close iterator because the return() method did not return an object, but ${
+                  type(returnPromiseResult)
+                }.`,
+              );
+            }
+
+            return undefined;
+          },
+          [SymbolAsyncIterator]() {
+            return this;
+          },
+        };
+      },
+    };
+  };
+}
+
 function createRecordConverter(keyConverter, valueConverter) {
   return (V, prefix, context, opts) => {
     if (type(V) !== "Object") {

@@ -1302,9 +1425,11 @@ function setlike(obj, objPrototype, readonly) {

 export {
   assertBranded,
+  AsyncIterable,
   brand,
   configureInterface,
   converters,
+  createAsyncIterableConverter,
   createBranded,
   createDictionaryConverter,
   createEnumConverter,

@@ -1315,6 +1440,7 @@ export {
   createSequenceConverter,
   illegalConstructor,
   invokeCallbackFunction,
+  isAsyncIterable,
   makeException,
   mixinPairIterable,
   requiredArguments,

ext/webidl/internal.d.ts (vendored, 26 changes)

@@ -438,6 +438,27 @@ declare module "ext:deno_webidl/00_webidl.js" {
     opts?: any,
   ) => T[];

+  /**
+   * Create a converter that converts an async iterable of the inner type.
+   */
+  function createAsyncIterableConverter<V, T>(
+    converter: (
+      v: V,
+      prefix?: string,
+      context?: string,
+      opts?: any,
+    ) => T,
+  ): (
+    v: any,
+    prefix?: string,
+    context?: string,
+    opts?: any,
+  ) => ConvertedAsyncIterable<V, T>;
+
+  interface ConvertedAsyncIterable<V, T> extends AsyncIterableIterator<T> {
+    value: V;
+  }
+
   /**
    * Create a converter that converts a Promise of the inner type.
    */

@@ -559,4 +580,9 @@ declare module "ext:deno_webidl/00_webidl.js" {
     | "Symbol"
     | "BigInt"
     | "Object";

+  /**
+   * Check whether a value is an async iterable.
+   */
+  function isAsyncIterable(v: any): boolean;
 }
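
A usage sketch of the new converter surface (this API is internal to Deno's extension code; the input array here is an arbitrary example):

    const converter = webidl.createAsyncIterableConverter(
      webidl.converters.Uint8Array,
    );
    // Conversion only checks that the value looks (a)sync iterable...
    const iterable = converter([new Uint8Array([1, 2])], "prefix", "context");
    // ...open() obtains the iterator; next() converts each chunk lazily.
    const iter = iterable.open("context");
    const { value, done } = await iter.next(); // value: Uint8Array(2), done: false
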
@@ -8270,6 +8270,130 @@ fn lsp_npm_auto_import_and_quick_fix_byonm() {
   client.shutdown();
 }

+#[test]
+fn lsp_npm_auto_import_with_deno_types() {
+  let context = TestContextBuilder::new()
+    .use_http_server()
+    .use_temp_cwd()
+    .add_npm_env_vars()
+    .build();
+  let temp_dir = context.temp_dir();
+  temp_dir.write(
+    "deno.json",
+    json!({
+      "compilerOptions": {
+        "jsx": "react-jsx",
+        "jsxImportSource": "react",
+        "jsxImportSourceTypes": "@types/react",
+      },
+    })
+    .to_string(),
+  );
+  temp_dir.write(
+    "package.json",
+    json!({
+      "dependencies": {
+        "react": "*",
+        "@types/react": "*",
+        "lz-string": "1.3",
+        "@types/lz-string": "1.3",
+      },
+    })
+    .to_string(),
+  );
+  context.run_npm("install");
+  temp_dir.write(
+    "other.ts",
+    r#"
+      // @deno-types="@types/lz-string"
+      import "lz-string";
+    "#,
+  );
+  let mut client = context.new_lsp_command().build();
+  client.initialize_default();
+  client.did_open(json!({
+    "textDocument": {
+      "uri": temp_dir.url().join("file.ts").unwrap(),
+      "languageId": "typescript",
+      "version": 1,
+      "text": r#"
+        compressToBase64();
+        createRef();
+      "#,
+    },
+  }));
+  let list = client.get_completion_list(
+    temp_dir.url().join("file.ts").unwrap(),
+    (1, 24),
+    json!({ "triggerKind": 1 }),
+  );
+  let item = list
+    .items
+    .iter()
+    .find(|item| item.label == "compressToBase64")
+    .unwrap();
+  let res = client.write_request("completionItem/resolve", item);
+  assert_eq!(
+    res,
+    json!({
+      "label": "compressToBase64",
+      "labelDetails": {
+        "description": "lz-string",
+      },
+      "kind": 2,
+      "detail": "(method) LZString.LZStringStatic.compressToBase64(uncompressed: string): string",
+      "documentation": {
+        "kind": "markdown",
+        "value": "Compresses input string producing an instance of a ASCII UTF-16 string,\nwhich represents the original string encoded in Base64.\nThe result can be safely transported outside the browser with a\nguarantee that none of the characters produced need to be URL-encoded.\n\n*@param* - uncompressed A string which should be compressed.",
+      },
+      "sortText": "16_0",
+      "additionalTextEdits": [
+        {
+          "range": {
+            "start": { "line": 0, "character": 0 },
+            "end": { "line": 0, "character": 0 },
+          },
+          "newText": "// @deno-types=\"@types/lz-string\"\nimport { compressToBase64 } from \"lz-string\";\n",
+        },
+      ],
+    }),
+  );
+  let list = client.get_completion_list(
+    temp_dir.url().join("file.ts").unwrap(),
+    (2, 17),
+    json!({ "triggerKind": 1 }),
+  );
+  let item = list
+    .items
+    .iter()
+    .find(|item| item.label == "createRef")
+    .unwrap();
+  let res = client.write_request("completionItem/resolve", item);
+  assert_eq!(
+    res,
+    json!({
+      "label": "createRef",
+      "labelDetails": {
+        "description": "react",
+      },
+      "kind": 3,
+      "detail": "function React.createRef<T>(): React.RefObject<T>",
+      "documentation": { "kind": "markdown", "value": "" },
+      "sortText": "16_0",
+      "additionalTextEdits": [
+        {
+          "range": {
+            "start": { "line": 0, "character": 0 },
+            "end": { "line": 0, "character": 0 },
+          },
+          "newText": "// @deno-types=\"@types/react\"\nimport { createRef } from \"react\";\n",
+        },
+      ],
+    }),
+  );
+  client.shutdown();
+}
+
 #[test]
 fn lsp_completions_node_specifier() {
   let context = TestContextBuilder::new().use_temp_cwd().build();

@@ -72,6 +72,7 @@ util::unit_test_factory!(
   dgram_test,
   domain_test,
   fs_test,
+  fetch_test,
   http_test,
   http2_test,
   inspector_test,

tests/specs/npm/typo_in_npm/__test__.jsonc (new file, 5 lines)

@@ -0,0 +1,5 @@
+{
+  "args": "run np:cowsay",
+  "output": "main.out",
+  "exitCode": 1
+}

tests/specs/npm/typo_in_npm/main.out (new file, 8 lines)

@@ -0,0 +1,8 @@
+error: Unsupported scheme "np" for module "np:cowsay". Supported schemes:
+ - "blob"
+ - "data"
+ - "file"
+ - "http"
+ - "https"
+ - "jsr"
+ - "npm"

@@ -1,7 +1,8 @@
-error: Uncaught (in promise) TypeError: Unsupported scheme "xxx" for module "xxx:". Supported schemes: [
-  "data",
-  "blob",
-  "file",
-  "http",
-  "https",
-]
+error: Uncaught (in promise) TypeError: Unsupported scheme "xxx" for module "xxx:". Supported schemes:
+ - "blob"
+ - "data"
+ - "file"
+ - "http"
+ - "https"
+ - "jsr"
+ - "npm"

tests/testdata/run/extension_import.ts.out (vendored, 15 changes)

@@ -1,8 +1,9 @@
-error: Unsupported scheme "ext" for module "ext:runtime/01_errors.js". Supported schemes: [
-  "data",
-  "blob",
-  "file",
-  "http",
-  "https",
-]
+error: Unsupported scheme "ext" for module "ext:runtime/01_errors.js". Supported schemes:
+ - "blob"
+ - "data"
+ - "file"
+ - "http"
+ - "https"
+ - "jsr"
+ - "npm"
     at [WILDCARD]/extension_import.ts:1:8

@@ -2119,3 +2119,30 @@ Deno.test(
     await server;
   },
 );
+
+Deno.test("fetch async iterable", async () => {
+  const iterable = (async function* () {
+    yield new Uint8Array([1, 2, 3, 4, 5]);
+    yield new Uint8Array([6, 7, 8, 9, 10]);
+  })();
+  const res = new Response(iterable);
+  const actual = await res.bytes();
+  const expected = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
+  assertEquals(actual, expected);
+});
+
+Deno.test("fetch iterable", async () => {
+  const iterable = (function* () {
+    yield new Uint8Array([1, 2, 3, 4, 5]);
+    yield new Uint8Array([6, 7, 8, 9, 10]);
+  })();
+  const res = new Response(iterable);
+  const actual = await res.bytes();
+  const expected = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
+  assertEquals(actual, expected);
+});
+
+Deno.test("fetch string object", async () => {
+  const res = new Response(Object("hello"));
+  assertEquals(await res.text(), "hello");
+});

@@ -1,5 +1,10 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
-import { assertEquals, assertRejects, fail } from "./test_util.ts";
+import {
+  assertEquals,
+  assertRejects,
+  assertThrows,
+  fail,
+} from "./test_util.ts";

 const {
   core,

|
|||
"corrupt gzip stream does not have a matching checksum",
|
||||
);
|
||||
});
|
||||
|
||||
Deno.test(function readableStreamFromWithStringThrows() {
|
||||
assertThrows(
|
||||
() => ReadableStream.from("string"),
|
||||
TypeError,
|
||||
"Failed to execute 'ReadableStream.from': Argument 1 can not be converted to async iterable.",
|
||||
);
|
||||
});
|
||||
|
|
tests/unit_node/fetch_test.ts (new file, 18 lines)

@@ -0,0 +1,18 @@
+// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+
+import { assertEquals } from "@std/assert";
+import { createReadStream } from "node:fs";
+
+Deno.test("fetch node stream", async () => {
+  const file = createReadStream("tests/testdata/assets/fixture.json");
+
+  const response = await fetch("http://localhost:4545/echo_server", {
+    method: "POST",
+    body: file,
+  });
+
+  assertEquals(
+    await response.text(),
+    await Deno.readTextFile("tests/testdata/assets/fixture.json"),
+  );
+});

@@ -3257,8 +3257,14 @@
       "owning-type-message-port.any.worker.html": false,
       "owning-type.any.html": false,
      "owning-type.any.worker.html": false,
-      "from.any.html": true,
-      "from.any.worker.html": true
+      "from.any.html": [
+        "ReadableStream.from ignores a null @@asyncIterator",
+        "ReadableStream.from accepts a string"
+      ],
+      "from.any.worker.html": [
+        "ReadableStream.from ignores a null @@asyncIterator",
+        "ReadableStream.from accepts a string"
+      ]
     },
     "transform-streams": {
       "backpressure.any.html": true,