Mirror of https://github.com/denoland/deno.git, synced 2024-11-21 15:04:11 -05:00
refactor(lsp): fewer clones (#17551)
parent 21065797f6
commit 7f38f30a5c

8 changed files with 22 additions and 31 deletions
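
The diff below applies one pattern throughout: parameters that forced callers to clone are changed to take a borrow, and helpers that consume their argument anyway take it by value so callers can move instead of clone. A minimal sketch of the borrow direction, using hypothetical function names (not from the Deno codebase):

    // Before: the callee takes ownership, so a caller that only holds a
    // borrowed value (e.g. from HashMap::get) has to clone it first.
    fn content_len_owned(maybe_content_type: Option<String>) -> usize {
      maybe_content_type.map(|s| s.len()).unwrap_or(0)
    }

    // After: the callee borrows, so the Option<&String> that
    // HashMap::get already returns can be passed straight through.
    fn content_len_borrowed(maybe_content_type: Option<&String>) -> usize {
      maybe_content_type.map(|s| s.len()).unwrap_or(0)
    }

    fn main() {
      let mut headers = std::collections::HashMap::new();
      headers.insert("content-type".to_string(), "text/plain".to_string());

      // Before: one extra String allocation per call.
      assert_eq!(content_len_owned(headers.get("content-type").cloned()), 10);

      // After: no allocation at all.
      assert_eq!(content_len_borrowed(headers.get("content-type")), 10);
    }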

cli/file_fetcher.rs

@@ -154,7 +154,7 @@ fn get_validated_scheme(
 /// the value of a content type header.
 pub fn map_content_type(
   specifier: &ModuleSpecifier,
-  maybe_content_type: Option<String>,
+  maybe_content_type: Option<&String>,
 ) -> (MediaType, Option<String>) {
   if let Some(content_type) = maybe_content_type {
     let mut content_types = content_type.split(';');

@@ -226,7 +226,7 @@ impl FileFetcher {
       .ok_or_else(|| {
         generic_error("Cannot convert specifier to cached filename.")
       })?;
-    let maybe_content_type = headers.get("content-type").cloned();
+    let maybe_content_type = headers.get("content-type");
     let (media_type, maybe_charset) =
       map_content_type(specifier, maybe_content_type);
     let source = get_source_from_bytes(bytes, maybe_charset)?;

@@ -308,8 +308,7 @@ impl FileFetcher {
     }

     let (source, content_type) = get_source_from_data_url(specifier)?;
-    let (media_type, _) =
-      map_content_type(specifier, Some(content_type.clone()));
+    let (media_type, _) = map_content_type(specifier, Some(&content_type));

     let local =
       self

@@ -372,7 +371,7 @@ impl FileFetcher {
     let bytes = blob.read_all().await?;

     let (media_type, maybe_charset) =
-      map_content_type(specifier, Some(content_type.clone()));
+      map_content_type(specifier, Some(&content_type));
     let source = get_source_from_bytes(bytes, maybe_charset)?;

     let local =

@@ -1028,7 +1027,7 @@ mod tests {
     for (specifier, maybe_content_type, media_type, maybe_charset) in fixtures {
       let specifier = resolve_url_or_path(specifier).unwrap();
       assert_eq!(
-        map_content_type(&specifier, maybe_content_type),
+        map_content_type(&specifier, maybe_content_type.as_ref()),
         (media_type, maybe_charset)
       );
     }
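
In the test fixture loop above, each maybe_content_type is still an owned Option<String>; Option::as_ref is what adapts it to the new Option<&String> parameter without giving the value up. A small illustration:

    fn main() {
      let maybe_content_type: Option<String> = Some("text/plain".to_string());

      // as_ref turns &Option<String> into Option<&String>: the call
      // borrows the contents, and the owned Option survives it.
      let borrowed: Option<&String> = maybe_content_type.as_ref();
      assert_eq!(borrowed.map(|s| s.as_str()), Some("text/plain"));
      assert!(maybe_content_type.is_some());
    }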

cli/lsp/completions.rs

@@ -324,7 +324,7 @@ fn get_import_map_completions(
           new_text: label.clone(),
         }));
         items.push(lsp::CompletionItem {
-          label: label.clone(),
+          label,
           kind,
           detail: Some("(import map)".to_string()),
           sort_text: Some("1".to_string()),

@@ -523,12 +523,7 @@ mod tests {
     for (specifier, source, version, language_id) in fixtures {
       let specifier =
         resolve_url(specifier).expect("failed to create specifier");
-      documents.open(
-        specifier.clone(),
-        *version,
-        *language_id,
-        (*source).into(),
-      );
+      documents.open(specifier, *version, *language_id, (*source).into());
     }
     let http_cache = HttpCache::new(location);
     for (specifier, source) in source_fixtures {

cli/lsp/config.rs

@@ -560,8 +560,7 @@ impl Config {
       for (workspace, folder) in workspace_folders {
         if let Ok(settings) = client.specifier_configuration(&folder.uri).await
         {
-          if self.update_enabled_paths_entry(&workspace, settings.enable_paths)
-          {
+          if self.update_enabled_paths_entry(workspace, settings.enable_paths) {
             touched = true;
           }
         }

@@ -569,7 +568,7 @@ impl Config {
       touched
     } else if let Some(root_uri) = self.root_uri.clone() {
       self.update_enabled_paths_entry(
-        &root_uri,
+        root_uri,
         self.settings.workspace.enable_paths.clone(),
       )
     } else {

@@ -580,10 +579,10 @@ impl Config {
   /// Update a specific entry in the enabled paths for a given workspace.
   fn update_enabled_paths_entry(
     &mut self,
-    workspace: &ModuleSpecifier,
+    workspace: ModuleSpecifier,
     enabled_paths: Vec<String>,
   ) -> bool {
-    let workspace = ensure_directory_specifier(workspace.clone());
+    let workspace = ensure_directory_specifier(workspace);
     let key = workspace.to_string();
     let mut touched = false;
     if !enabled_paths.is_empty() {
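
update_enabled_paths_entry now takes the workspace specifier by value because ensure_directory_specifier consumes its argument anyway; moving is free where borrowing would just force a clone inside. A sketch of the same trade-off, with plain Strings standing in for ModuleSpecifier:

    // Stand-in for ensure_directory_specifier, which consumes its input.
    fn ensure_trailing_slash(mut s: String) -> String {
      if !s.ends_with('/') {
        s.push('/');
      }
      s
    }

    // Before: borrow, then clone to satisfy the consuming helper.
    fn entry_key_borrowed(workspace: &String) -> String {
      ensure_trailing_slash(workspace.clone())
    }

    // After: take ownership; callers that are done with the value
    // move it in and no clone happens at all.
    fn entry_key_owned(workspace: String) -> String {
      ensure_trailing_slash(workspace)
    }

    fn main() {
      let ws = "file:///project".to_string();
      assert_eq!(entry_key_borrowed(&ws), "file:///project/");
      assert_eq!(entry_key_owned(ws), "file:///project/");
    }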

cli/lsp/documents.rs

@@ -718,8 +718,7 @@ impl FileSystemDocuments {
     } else {
       let cache_filename = cache.get_cache_filename(specifier)?;
       let specifier_metadata = CachedUrlMetadata::read(&cache_filename).ok()?;
-      let maybe_content_type =
-        specifier_metadata.headers.get("content-type").cloned();
+      let maybe_content_type = specifier_metadata.headers.get("content-type");
       let maybe_headers = Some(&specifier_metadata.headers);
       let (_, maybe_charset) = map_content_type(specifier, maybe_content_type);
       let content = get_source_from_bytes(bytes, maybe_charset).ok()?;

@@ -1055,9 +1054,7 @@ impl Documents {
       } else if let Some(Resolved::Ok { specifier, .. }) =
         self.resolve_imports_dependency(&specifier)
       {
-        // clone here to avoid double borrow of self
-        let specifier = specifier.clone();
-        results.push(self.resolve_dependency(&specifier, maybe_npm_resolver));
+        results.push(self.resolve_dependency(specifier, maybe_npm_resolver));
       } else if let Ok(npm_ref) = NpmPackageReference::from_str(&specifier) {
         results.push(maybe_npm_resolver.map(|npm_resolver| {
           NodeResolution::into_specifier_and_media_type(

cli/lsp/language_server.rs

@@ -1528,7 +1528,7 @@ impl Inner {
         .extend(refactor_info.to_code_actions(&specifier, &params.range));
     }
     all_actions.extend(
-      refactor::prune_invalid_actions(&refactor_actions, 5)
+      refactor::prune_invalid_actions(refactor_actions, 5)
         .into_iter()
         .map(CodeActionOrCommand::CodeAction),
     );

cli/lsp/performance.rs

@@ -121,10 +121,11 @@ impl Performance {
     averages
       .into_iter()
       .map(|(k, d)| {
-        let a = d.clone().into_iter().sum::<Duration>() / d.len() as u32;
+        let count = d.len() as u32;
+        let a = d.into_iter().sum::<Duration>() / count;
         PerformanceAverage {
           name: k,
-          count: d.len() as u32,
+          count,
           average_duration: a.as_millis() as u32,
         }
       })
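
The Performance change works because d.len() is read into count before d.into_iter() consumes the vector; the old code cloned d only so it could still call d.len() afterwards. The same shape in isolation:

    use std::time::Duration;

    fn main() {
      let d = vec![Duration::from_millis(10), Duration::from_millis(30)];

      // Capture the length first; into_iter() consumes the Vec, so any
      // later d.len() would require keeping a clone around.
      let count = d.len() as u32;
      let average = d.into_iter().sum::<Duration>() / count;

      assert_eq!(average, Duration::from_millis(20));
    }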

cli/lsp/refactor.rs

@@ -157,7 +157,7 @@ pub struct RefactorCodeActionData {
 }

 pub fn prune_invalid_actions(
-  actions: &[lsp::CodeAction],
+  actions: Vec<lsp::CodeAction>,
   number_of_invalid: usize,
 ) -> Vec<lsp::CodeAction> {
   let mut available_actions = Vec::<lsp::CodeAction>::new();

@@ -165,7 +165,7 @@ pub fn prune_invalid_actions(
   let mut invalid_uncommon_actions = Vec::<lsp::CodeAction>::new();
   for action in actions {
     if action.disabled.is_none() {
-      available_actions.push(action.clone());
+      available_actions.push(action);
       continue;
     }

@@ -175,12 +175,12 @@ pub fn prune_invalid_actions(
     if action_kind.starts_with(EXTRACT_CONSTANT.kind.as_str())
       || action_kind.starts_with(EXTRACT_FUNCTION.kind.as_str())
     {
-      invalid_common_actions.push(action.clone());
+      invalid_common_actions.push(action);
       continue;
     }

     // These are the remaining refactors that we can show if we haven't reached the max limit with just common refactors.
-    invalid_uncommon_actions.push(action.clone());
+    invalid_uncommon_actions.push(action);
   }

   let mut prioritized_actions = Vec::<lsp::CodeAction>::new();
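
Taking actions as Vec<lsp::CodeAction> means the for loop yields owned items, so every push is a move; with the old &[lsp::CodeAction] slice each kept action had to be cloned. A minimal sketch of that difference, with Strings in place of code actions:

    // Before: iterating a slice yields &T, so keeping an item forces a clone.
    fn keep_all_borrowed(actions: &[String]) -> Vec<String> {
      let mut kept = Vec::new();
      for action in actions {
        kept.push(action.clone());
      }
      kept
    }

    // After: iterating an owned Vec yields T, so items move for free.
    fn keep_all_owned(actions: Vec<String>) -> Vec<String> {
      let mut kept = Vec::new();
      for action in actions {
        kept.push(action);
      }
      kept
    }

    fn main() {
      let actions = vec!["extract".to_string(), "inline".to_string()];
      assert_eq!(keep_all_borrowed(&actions).len(), 2);
      assert_eq!(keep_all_owned(actions).len(), 2);
    }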

cli/lsp/urls.rs

@@ -107,7 +107,7 @@ impl LspUrlMap {
       .map_err(|e| uri_error(format!("{:?}", e)))?;
     let mime = data_url.mime_type();
     let (media_type, _) =
-      map_content_type(specifier, Some(format!("{}", mime)));
+      map_content_type(specifier, Some(&format!("{}", mime)));
     let extension = if media_type == MediaType::Unknown {
       ""
     } else {
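
Borrowing the format! result inline works because a temporary lives to the end of the enclosing statement, and map_content_type only reads the string during the call. In isolation:

    fn content_len(maybe_content_type: Option<&String>) -> usize {
      maybe_content_type.map(|s| s.len()).unwrap_or(0)
    }

    fn main() {
      let mime = "text/plain";
      // The String built by format! is a temporary that lives until the
      // end of this statement, so borrowing it in the argument is fine.
      assert_eq!(content_len(Some(&format!("{}", mime))), 10);
    }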