
refactor(cli,ext,ops): cleanup regex with lazy-regex (#17296)

- bump deps: the newest `lazy-regex` needs newer `once_cell` and `regex`
- reduce `unwrap` calls
- remove the `lazy_static` dependency
- cache more regexes
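
Below is a minimal sketch of the before/after pattern this commit applies, using one of the regexes from the diff (`EMAIL_MATCH_RE` in cli/lsp/tsc.rs); the `author_email` helper is hypothetical and only illustrates the expression form:

use once_cell::sync::Lazy;
use regex::Regex;

// Before: a statically cached regex built by hand with once_cell + regex,
// requiring a runtime unwrap on the pattern.
static EMAIL_MATCH_RE_OLD: Lazy<Regex> =
  Lazy::new(|| Regex::new(r"(.+)\s<([-.\w]+@[-.\w]+)>").unwrap());

// After: lazy-regex checks the pattern at compile time and expands to the
// same once_cell-backed cache, so no unwrap is needed.
static EMAIL_MATCH_RE: Lazy<Regex> =
  lazy_regex::lazy_regex!(r"(.+)\s<([-.\w]+@[-.\w]+)>");

// regex! is the expression form: it yields a cached &'static Regex, which is
// why one-off Regex::new(...) calls in the diff become lazy_regex::regex!(...).
fn author_email(line: &str) -> Option<&str> {
  // Hypothetical helper, not part of the commit.
  let re = lazy_regex::regex!(r"(.+)\s<([-.\w]+@[-.\w]+)>");
  re.captures(line).and_then(|c| c.get(2)).map(|m| m.as_str())
}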

---------

Co-authored-by: Bartek Iwańczuk <biwanczuk@gmail.com>
Yiyu Lin 2023-04-13 09:08:01 +08:00 committed by Levente Kurusa
parent 300ced90d8
commit 9161e1cacf
23 changed files with 96 additions and 91 deletions

Cargo.lock (generated): 28 changed lines
View file

@@ -761,6 +761,7 @@ dependencies = [
  "indexmap",
  "jsonc-parser",
  "junction",
+ "lazy-regex",
  "libc",
  "log",
  "lsp-types",
@@ -1156,6 +1157,7 @@ dependencies = [
  "hkdf",
  "idna 0.3.0",
  "indexmap",
+ "lazy-regex",
  "libz-sys",
  "md-5",
  "md4",
@@ -1201,6 +1203,7 @@ dependencies = [
 name = "deno_ops"
 version = "0.58.0"
 dependencies = [
+ "lazy-regex",
  "once_cell",
  "pmutil",
  "prettyplease",
@@ -2523,6 +2526,29 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "lazy-regex"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff63c423c68ea6814b7da9e88ce585f793c87ddd9e78f646970891769c8235d4"
+dependencies = [
+ "lazy-regex-proc_macros",
+ "once_cell",
+ "regex",
+]
+
+[[package]]
+name = "lazy-regex-proc_macros"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8edfc11b8f56ce85e207e62ea21557cfa09bb24a8f6b04ae181b086ff8611c22"
+dependencies = [
+ "proc-macro2 1.0.56",
+ "quote 1.0.26",
+ "regex",
+ "syn 1.0.109",
+]
+
 [[package]]
 name = "lazy_static"
 version = "1.4.0"
@@ -4765,7 +4791,7 @@ dependencies = [
  "flate2",
  "futures",
  "hyper",
- "lazy_static",
+ "lazy-regex",
  "lsp-types",
  "nix",
  "once_cell",

View file

@@ -110,6 +110,7 @@ pin-project = "1.0.11" # don't pin because they yank crates from cargo
 pretty_assertions = "=1.3.0"
 rand = "=0.8.5"
 regex = "^1.7.0"
+lazy-regex = "2.5.0"
 reqwest = { version = "0.11.11", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli", "socks"] }
 ring = "=0.16.20"
 rusqlite = { version = "=0.28.0", features = ["unlock_notify", "bundled"] }

View file

@@ -29,7 +29,7 @@ path = "./bench/lsp_bench_standalone.rs"
 [build-dependencies]
 deno_runtime = { workspace = true, features = ["snapshot_from_snapshot", "include_js_files_for_snapshotting"] }
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
-regex.workspace = true
+lazy-regex.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 zstd.workspace = true
@@ -78,6 +78,7 @@ http.workspace = true
 import_map = "=0.15.0"
 indexmap.workspace = true
 jsonc-parser = { version = "=0.21.0", features = ["serde"] }
+lazy-regex.workspace = true
 libc.workspace = true
 log = { workspace = true, features = ["serde"] }
 lsp-types.workspace = true

View file

@@ -20,7 +20,6 @@ mod ts {
   use deno_core::op;
   use deno_core::OpState;
   use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES;
-  use regex::Regex;
   use serde::Deserialize;
   use serde_json::json;
   use serde_json::Value;
@@ -69,8 +68,7 @@ mod ts {
   fn op_load(state: &mut OpState, args: LoadArgs) -> Result<Value, AnyError> {
     let op_crate_libs = state.borrow::<HashMap<&str, PathBuf>>();
     let path_dts = state.borrow::<PathBuf>();
-    let re_asset =
-      Regex::new(r"asset:/{3}lib\.(\S+)\.d\.ts").expect("bad regex");
+    let re_asset = lazy_regex::regex!(r"asset:/{3}lib\.(\S+)\.d\.ts");
     let build_specifier = "asset:///bootstrap.ts";
 
     // we need a basic file to send to tsc to warm it up.

View file

@@ -53,7 +53,7 @@ static PREFERRED_FIXES: Lazy<HashMap<&'static str, (u32, bool)>> =
   });
 
 static IMPORT_SPECIFIER_RE: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r#"\sfrom\s+["']([^"']*)["']"#).unwrap());
+  lazy_regex::lazy_regex!(r#"\sfrom\s+["']([^"']*)["']"#);
 
 const SUPPORTED_EXTENSIONS: &[&str] = &[".ts", ".tsx", ".js", ".jsx", ".mjs"];

View file

@@ -21,6 +21,7 @@ use deno_core::serde::Serialize;
 use deno_core::serde_json;
 use deno_core::serde_json::json;
 use deno_core::ModuleSpecifier;
+use lazy_regex::lazy_regex;
 use once_cell::sync::Lazy;
 use regex::Regex;
 use std::cell::RefCell;
@@ -29,11 +30,9 @@ use std::rc::Rc;
 use std::sync::Arc;
 use tower_lsp::lsp_types as lsp;
 
-static ABSTRACT_MODIFIER: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r"\babstract\b").unwrap());
+static ABSTRACT_MODIFIER: Lazy<Regex> = lazy_regex!(r"\babstract\b");
 
-static EXPORT_MODIFIER: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r"\bexport\b").unwrap());
+static EXPORT_MODIFIER: Lazy<Regex> = lazy_regex!(r"\bexport\b");
 
 #[derive(Debug, Deserialize, Serialize)]
 pub enum CodeLensSource {

View file

@@ -28,7 +28,7 @@ use std::sync::Arc;
 use tower_lsp::lsp_types as lsp;
 
 static FILE_PROTO_RE: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r#"^file:/{2}(?:/[A-Za-z]:)?"#).unwrap());
+  lazy_regex::lazy_regex!(r#"^file:/{2}(?:/[A-Za-z]:)?"#);
 
 const CURRENT_PATH: &str = ".";
 const PARENT_PATH: &str = "..";

View file

@@ -37,7 +37,7 @@ use std::fmt::Write as _;
 use std::iter::Peekable;
 
 static ESCAPE_STRING_RE: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r"([.+*?=^!:${}()\[\]|/\\])").unwrap());
+  lazy_regex::lazy_regex!(r"([.+*?=^!:${}()\[\]|/\\])");
 
 #[derive(Debug, PartialEq, Eq)]
 enum TokenType {

View file

@@ -33,7 +33,6 @@ use deno_runtime::deno_web::BlobStore;
 use deno_runtime::permissions::PermissionsContainer;
 use log::error;
 use once_cell::sync::Lazy;
-use regex::Regex;
 use std::collections::HashMap;
 use std::path::Path;
 use tower_lsp::lsp_types as lsp;
@@ -66,8 +65,8 @@ const COMPONENT: &percent_encoding::AsciiSet = &percent_encoding::CONTROLS
 const REGISTRY_IMPORT_COMMIT_CHARS: &[&str] = &["\"", "'", "/"];
 
-static REPLACEMENT_VARIABLE_RE: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r"\$\{\{?(\w+)\}?\}").unwrap());
+static REPLACEMENT_VARIABLE_RE: Lazy<regex::Regex> =
+  lazy_regex::lazy_regex!(r"\$\{\{?(\w+)\}?\}");
 
 fn base_url(url: &Url) -> String {
   url.origin().ascii_serialization()

View file

@@ -44,6 +44,7 @@ use deno_core::ModuleSpecifier;
 use deno_core::OpState;
 use deno_core::RuntimeOptions;
 use deno_runtime::tokio_util::create_basic_runtime;
+use lazy_regex::lazy_regex;
 use once_cell::sync::Lazy;
 use regex::Captures;
 use regex::Regex;
@@ -65,24 +66,18 @@ use tower_lsp::jsonrpc::Result as LspResult;
 use tower_lsp::lsp_types as lsp;
 
-static BRACKET_ACCESSOR_RE: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r#"^\[['"](.+)[\['"]\]$"#).unwrap());
-static CAPTION_RE: Lazy<Regex> = Lazy::new(|| {
-  Regex::new(r"<caption>(.*?)</caption>\s*\r?\n((?:\s|\S)*)").unwrap()
-});
-static CODEBLOCK_RE: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r"^\s*[~`]{3}").unwrap());
-static EMAIL_MATCH_RE: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r"(.+)\s<([-.\w]+@[-.\w]+)>").unwrap());
-static HTTP_RE: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r#"(?i)^https?:"#).unwrap());
-static JSDOC_LINKS_RE: Lazy<Regex> = Lazy::new(|| {
-  Regex::new(r"(?i)\{@(link|linkplain|linkcode) (https?://[^ |}]+?)(?:[| ]([^{}\n]+?))?\}").unwrap()
-});
-static PART_KIND_MODIFIER_RE: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r",|\s+").unwrap());
-static PART_RE: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r"^(\S+)\s*-?\s*").unwrap());
-static SCOPE_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"scope_(\d)").unwrap());
+static BRACKET_ACCESSOR_RE: Lazy<Regex> =
+  lazy_regex!(r#"^\[['"](.+)[\['"]\]$"#);
+static CAPTION_RE: Lazy<Regex> =
+  lazy_regex!(r"<caption>(.*?)</caption>\s*\r?\n((?:\s|\S)*)");
+static CODEBLOCK_RE: Lazy<Regex> = lazy_regex!(r"^\s*[~`]{3}");
+static EMAIL_MATCH_RE: Lazy<Regex> = lazy_regex!(r"(.+)\s<([-.\w]+@[-.\w]+)>");
+static HTTP_RE: Lazy<Regex> = lazy_regex!(r#"(?i)^https?:"#);
+static JSDOC_LINKS_RE: Lazy<Regex> = lazy_regex!(
+  r"(?i)\{@(link|linkplain|linkcode) (https?://[^ |}]+?)(?:[| ]([^{}\n]+?))?\}"
+);
+static PART_KIND_MODIFIER_RE: Lazy<Regex> = lazy_regex!(r",|\s+");
+static PART_RE: Lazy<Regex> = lazy_regex!(r"^(\S+)\s*-?\s*");
+static SCOPE_RE: Lazy<Regex> = lazy_regex!(r"scope_(\d)");
 
 const FILE_EXTENSION_KIND_MODIFIERS: &[&str] =
   &[".d.ts", ".ts", ".tsx", ".js", ".jsx", ".json"];

View file

@@ -36,7 +36,6 @@ use deno_runtime::permissions::PermissionsContainer;
 use deno_semver::npm::NpmPackageNv;
 use deno_semver::npm::NpmPackageNvReference;
 use once_cell::sync::Lazy;
-use regex::Regex;
 
 use crate::cache::NodeAnalysisCache;
 use crate::file_fetcher::FileFetcher;
@@ -500,8 +499,7 @@ fn finalize_resolution(
   resolved: ModuleSpecifier,
   base: &ModuleSpecifier,
 ) -> Result<ModuleSpecifier, AnyError> {
-  // todo(dsherret): cache
-  let encoded_sep_re = Regex::new(r"%2F|%2C").unwrap();
+  let encoded_sep_re = lazy_regex::regex!(r"%2F|%2C");
 
   if encoded_sep_re.is_match(resolved.path()) {
     return Err(errors::err_invalid_module_specifier(

View file

@@ -321,7 +321,7 @@ fn get_tsc_roots(
 /// Matches the `@ts-check` pragma.
 static TS_CHECK_RE: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r#"(?i)^\s*@ts-check(?:\s+|$)"#).unwrap());
+  lazy_regex::lazy_regex!(r#"(?i)^\s*@ts-check(?:\s+|$)"#);
 
 fn has_ts_check(media_type: MediaType, file_text: &str) -> bool {
   match &media_type {

View file

@@ -35,7 +35,7 @@ static EXEC_NAME_RE: Lazy<Regex> = Lazy::new(|| {
   RegexBuilder::new(r"^[a-z][\w-]*$")
     .case_insensitive(true)
     .build()
-    .unwrap()
+    .expect("invalid regex")
 });
 
 fn validate_name(exec_name: &str) -> Result<(), AnyError> {

View file

@@ -1031,8 +1031,8 @@ fn extract_files_from_source_comments(
     scope_analysis: false,
   })?;
   let comments = parsed_source.comments().get_vec();
-  let blocks_regex = Regex::new(r"```([^\r\n]*)\r?\n([\S\s]*?)```")?;
-  let lines_regex = Regex::new(r"(?:\* ?)(?:\# ?)?(.*)")?;
+  let blocks_regex = lazy_regex::regex!(r"```([^\r\n]*)\r?\n([\S\s]*?)```");
+  let lines_regex = lazy_regex::regex!(r"(?:\* ?)(?:\# ?)?(.*)");
 
   let files = comments
     .iter()
@@ -1049,8 +1049,8 @@ fn extract_files_from_source_comments(
         &comment.text,
         media_type,
         parsed_source.text_info().line_index(comment.start()),
-        &blocks_regex,
-        &lines_regex,
+        blocks_regex,
+        lines_regex,
       )
     })
     .flatten()
@@ -1069,16 +1069,16 @@ fn extract_files_from_fenced_blocks(
   // check can be done to see if a block is inside a comment (and skip typechecking)
   // or not by checking for the presence of capturing groups in the matches.
   let blocks_regex =
-    Regex::new(r"(?s)<!--.*?-->|```([^\r\n]*)\r?\n([\S\s]*?)```")?;
-  let lines_regex = Regex::new(r"(?:\# ?)?(.*)")?;
+    lazy_regex::regex!(r"(?s)<!--.*?-->|```([^\r\n]*)\r?\n([\S\s]*?)```");
+  let lines_regex = lazy_regex::regex!(r"(?:\# ?)?(.*)");
 
   extract_files_from_regex_blocks(
     specifier,
     source,
     media_type,
     /* file line index */ 0,
-    &blocks_regex,
-    &lines_regex,
+    blocks_regex,
+    lines_regex,
   )
 }

View file

@@ -289,9 +289,9 @@ pub async fn upgrade(
   let install_version = match upgrade_flags.version {
     Some(passed_version) => {
-      if upgrade_flags.canary
-        && !regex::Regex::new("^[0-9a-f]{40}$")?.is_match(&passed_version)
-      {
+      let re_hash = lazy_regex::regex!("^[0-9a-f]{40}$");
+
+      if upgrade_flags.canary && !re_hash.is_match(&passed_version) {
         bail!("Invalid commit hash passed");
       } else if !upgrade_flags.canary
         && Version::parse_standard(&passed_version).is_err()

View file

@@ -6,6 +6,7 @@ use deno_core::serde::Deserialize;
 use deno_core::serde::Deserializer;
 use deno_core::serde::Serialize;
 use deno_core::serde::Serializer;
+use lazy_regex::lazy_regex;
 use once_cell::sync::Lazy;
 use regex::Regex;
 use std::error::Error;
@@ -36,13 +37,11 @@ const UNSTABLE_DENO_PROPS: &[&str] = &[
   "osUptime",
 ];
 
-static MSG_MISSING_PROPERTY_DENO: Lazy<Regex> = Lazy::new(|| {
-  Regex::new(r#"Property '([^']+)' does not exist on type 'typeof Deno'"#)
-    .unwrap()
-});
+static MSG_MISSING_PROPERTY_DENO: Lazy<Regex> =
+  lazy_regex!(r#"Property '([^']+)' does not exist on type 'typeof Deno'"#);
 
 static MSG_SUGGESTION: Lazy<Regex> =
-  Lazy::new(|| Regex::new(r#" Did you mean '([^']+)'\?"#).unwrap());
+  lazy_regex!(r#" Did you mean '([^']+)'\?"#);
 
 /// Potentially convert a "raw" diagnostic message from TSC to something that
 /// provides a more sensible error message given a Deno runtime context.

View file

@@ -24,6 +24,7 @@ hex.workspace = true
 hkdf.workspace = true
 idna = "0.3.0"
 indexmap.workspace = true
+lazy-regex.workspace = true
 libz-sys = { version = "1.1.8", features = ["static"] }
 md-5 = "0.10.5"
 md4 = "0.10.2"

View file

@@ -10,7 +10,6 @@ use deno_core::serde_json::Map;
 use deno_core::serde_json::Value;
 use deno_core::url::Url;
 use deno_core::ModuleSpecifier;
-use regex::Regex;
 
 use crate::errors;
 use crate::package_json::PackageJson;
@@ -342,8 +341,8 @@ fn resolve_package_target_string<Fs: NodeFs>(
     ));
   }
   let invalid_segment_re =
-    Regex::new(r"(^|\\|/)(\.\.?|node_modules)(\\|/|$)").expect("bad regex");
-  let pattern_re = Regex::new(r"\*").expect("bad regex");
+    lazy_regex::regex!(r"(^|\\|/)(\.\.?|node_modules)(\\|/|$)");
+  let pattern_re = lazy_regex::regex!(r"\*");
   if !target.starts_with("./") {
     if internal && !target.starts_with("../") && !target.starts_with('/') {
       let is_url = Url::parse(&target).is_ok();

View file

@@ -15,6 +15,7 @@ path = "./lib.rs"
 proc-macro = true
 
 [dependencies]
+lazy-regex.workspace = true
 once_cell.workspace = true
 pmutil = "0.5.3"
 proc-macro-crate = "1.1.3"

View file

@@ -1,7 +1,6 @@
 // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
 
 use attrs::Attributes;
-use once_cell::sync::Lazy;
 use optimizer::BailoutReason;
 use optimizer::Optimizer;
 use proc_macro::TokenStream;
@@ -9,7 +8,6 @@ use proc_macro2::Span;
 use proc_macro2::TokenStream as TokenStream2;
 use quote::quote;
 use quote::ToTokens;
-use regex::Regex;
 use syn::parse;
 use syn::parse_macro_input;
 use syn::punctuated::Punctuated;
@@ -859,30 +857,26 @@ fn is_unit_result(ty: impl ToTokens) -> bool {
 }
 
 fn is_resource_id(arg: impl ToTokens) -> bool {
-  static RE: Lazy<Regex> =
-    Lazy::new(|| Regex::new(r#": (?:deno_core :: )?ResourceId$"#).unwrap());
-  RE.is_match(&tokens(arg))
+  let re = lazy_regex::regex!(r#": (?:deno_core :: )?ResourceId$"#);
+  re.is_match(&tokens(arg))
 }
 
 fn is_mut_ref_opstate(arg: impl ToTokens) -> bool {
-  static RE: Lazy<Regex> =
-    Lazy::new(|| Regex::new(r#": & mut (?:deno_core :: )?OpState$"#).unwrap());
-  RE.is_match(&tokens(arg))
+  let re = lazy_regex::regex!(r#": & mut (?:deno_core :: )?OpState$"#);
+  re.is_match(&tokens(arg))
 }
 
 fn is_rc_refcell_opstate(arg: &syn::FnArg) -> bool {
-  static RE: Lazy<Regex> = Lazy::new(|| {
-    Regex::new(r#": Rc < RefCell < (?:deno_core :: )?OpState > >$"#).unwrap()
-  });
-  RE.is_match(&tokens(arg))
+  let re =
+    lazy_regex::regex!(r#": Rc < RefCell < (?:deno_core :: )?OpState > >$"#);
+  re.is_match(&tokens(arg))
 }
 
 fn is_handle_scope(arg: &syn::FnArg) -> bool {
-  static RE: Lazy<Regex> = Lazy::new(|| {
-    Regex::new(r#": & mut (?:deno_core :: )?v8 :: HandleScope(?: < '\w+ >)?$"#)
-      .unwrap()
-  });
-  RE.is_match(&tokens(arg))
+  let re = lazy_regex::regex!(
+    r#": & mut (?:deno_core :: )?v8 :: HandleScope(?: < '\w+ >)?$"#
+  );
+  re.is_match(&tokens(arg))
 }
 
 fn is_future(ty: impl ToTokens) -> bool {

View file

@@ -22,7 +22,7 @@ console_static_text.workspace = true
 flate2.workspace = true
 futures.workspace = true
 hyper = { workspace = true, features = ["server", "http1", "http2", "runtime"] }
-lazy_static = "1.4.0"
+lazy-regex.workspace = true
 lsp-types.workspace = true
 nix.workspace = true
 once_cell.workspace = true

View file

@@ -13,8 +13,8 @@ use hyper::Body;
 use hyper::Request;
 use hyper::Response;
 use hyper::StatusCode;
-use lazy_static::lazy_static;
 use npm::CUSTOM_NPM_PACKAGE_CACHE;
+use once_cell::sync::Lazy;
 use pretty_assertions::assert_eq;
 use pty::Pty;
 use regex::Regex;
@@ -91,10 +91,8 @@ pub const PERMISSION_VARIANTS: [&str; 5] =
   ["read", "write", "env", "net", "run"];
 pub const PERMISSION_DENIED_PATTERN: &str = "PermissionDenied";
 
-lazy_static! {
-  static ref GUARD: Mutex<HttpServerCount> =
-    Mutex::new(HttpServerCount::default());
-}
+static GUARD: Lazy<Mutex<HttpServerCount>> =
+  Lazy::new(|| Mutex::new(HttpServerCount::default()));
 
 pub fn env_vars_for_npm_tests_no_sync_download() -> Vec<(String, String)> {
   vec![
@@ -2176,12 +2174,10 @@ pub struct WrkOutput {
 }
 
 pub fn parse_wrk_output(output: &str) -> WrkOutput {
-  lazy_static! {
-    static ref REQUESTS_RX: Regex =
-      Regex::new(r"Requests/sec:\s+(\d+)").unwrap();
-    static ref LATENCY_RX: Regex =
-      Regex::new(r"\s+99%(?:\s+(\d+.\d+)([a-z]+))").unwrap();
-  }
+  static REQUESTS_RX: Lazy<Regex> =
+    lazy_regex::lazy_regex!(r"Requests/sec:\s+(\d+)");
+  static LATENCY_RX: Lazy<Regex> =
+    lazy_regex::lazy_regex!(r"\s+99%(?:\s+(\d+.\d+)([a-z]+))");
 
   let mut requests = None;
   let mut latency = None;

View file

@@ -9,7 +9,6 @@ use super::new_deno_dir;
 use super::TempDir;
 
 use anyhow::Result;
-use lazy_static::lazy_static;
 use lsp_types as lsp;
 use lsp_types::ClientCapabilities;
 use lsp_types::ClientInfo;
@@ -25,6 +24,7 @@ use lsp_types::TextDocumentClientCapabilities;
 use lsp_types::TextDocumentSyncClientCapabilities;
 use lsp_types::Url;
 use lsp_types::WorkspaceClientCapabilities;
+use once_cell::sync::Lazy;
 use parking_lot::Condvar;
 use parking_lot::Mutex;
 use regex::Regex;
@@ -48,10 +48,8 @@ use std::sync::Arc;
 use std::time::Duration;
 use std::time::Instant;
 
-lazy_static! {
-  static ref CONTENT_TYPE_REG: Regex =
-    Regex::new(r"(?i)^content-length:\s+(\d+)").unwrap();
-}
+static CONTENT_TYPE_REG: Lazy<Regex> =
+  lazy_regex::lazy_regex!(r"(?i)^content-length:\s+(\d+)");
 
 #[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct LspResponseError {