
refactor(npm): use deno_npm and deno_semver (#18602)

Authored by David Sherret on 2023-04-06 18:46:44 -04:00; committed by Levente Kurusa
parent acb9dca497
commit 3da1d5f8eb
36 changed files with 759 additions and 5852 deletions
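
In practice the change is mostly an import-path migration: the npm registry, resolution, and semver types that previously lived under deno_graph::npm, deno_graph::semver, and cli/npm now come from the extracted deno_npm and deno_semver crates. A representative before/after assembled from the hunks below (illustrative only, not a single real file):

// before this commit
use deno_graph::npm::NpmPackageReq;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::semver::Version;
use crate::npm::NpmRegistryApi; // CLI-local registry client

// after this commit
use deno_npm::registry::NpmRegistryApi; // the trait now lives in deno_npm
use deno_semver::npm::NpmPackageReq;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::Version;
use crate::npm::NpmRegistry; // renamed CLI client that implements the trait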

Cargo.lock generated

@ -117,7 +117,7 @@ checksum = "cf94863c5fdfee166d0907c44e5fee970123b2b7307046d35d1e671aa93afbba"
dependencies = [
"darling",
"pmutil",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"swc_macros_common",
"syn 1.0.109",
@ -154,20 +154,20 @@ version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4655ae1a7b0cdf149156f780c5bf3f1352bc53cbd9e0a361a7ef7b22947e965"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
[[package]]
name = "async-trait"
version = "0.1.67"
version = "0.1.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86ea188f25f0255d8f92797797c97ebf5631fa88178beb1a46fdf5622c9a00e4"
checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 2.0.0",
"syn 2.0.13",
]
[[package]]
@ -188,7 +188,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7862e21c893d65a1650125d157eaeec691439379a1cee17ee49031b79236ada4"
dependencies = [
"proc-macro-error",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -634,7 +634,7 @@ checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610"
dependencies = [
"fnv",
"ident_case",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"strsim",
"syn 1.0.109",
@ -699,7 +699,9 @@ dependencies = [
"deno_graph",
"deno_lint",
"deno_lockfile",
"deno_npm",
"deno_runtime",
"deno_semver",
"deno_task_shell",
"dissimilar",
"dotenv",
@ -894,9 +896,9 @@ dependencies = [
[[package]]
name = "deno_doc"
version = "0.59.0"
version = "0.60.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "809eb811963058339383294e27e464cf792d05e9e73c834ecf46a98c69a613c3"
checksum = "029ec20ba7a3c9d55597db7afa20576367ea8d70371a97b84f9909014cfe110f"
dependencies = [
"cfg-if",
"deno_ast",
@ -912,9 +914,9 @@ dependencies = [
[[package]]
name = "deno_emit"
version = "0.17.0"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f00bbc0de48ee82a1645063b71310bac75db182002d6cbe318b808c26c7d10ad"
checksum = "8004481b057addda0779edd5adb47e5ac9db7ae431c879300d22d535cc83cfae"
dependencies = [
"anyhow",
"base64 0.13.1",
@ -976,13 +978,14 @@ dependencies = [
[[package]]
name = "deno_graph"
version = "0.45.0"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46c47969e9745b8ecd0bbf674e374a47a2cc465b00887716295b422ddb5ae56b"
checksum = "1fb5531f3c2be6926e51ce5888fcffa434ca83516c53d26563882533aee871d0"
dependencies = [
"anyhow",
"data-url",
"deno_ast",
"deno_semver",
"futures",
"indexmap",
"monch",
@ -1133,6 +1136,24 @@ dependencies = [
"typenum",
]
[[package]]
name = "deno_npm"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b498f30dd6d0149e98b210ef7a01e54b2a8ba703b08a30fe4d87b3f36b93737c"
dependencies = [
"anyhow",
"async-trait",
"deno_semver",
"futures",
"log",
"monch",
"once_cell",
"parking_lot 0.12.1",
"serde",
"thiserror",
]
[[package]]
name = "deno_ops"
version = "0.57.0"
@ -1141,7 +1162,7 @@ dependencies = [
"pmutil",
"prettyplease",
"proc-macro-crate",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"regex",
"syn 1.0.109",
@ -1202,6 +1223,18 @@ dependencies = [
"winres",
]
[[package]]
name = "deno_semver"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e2d677702f75c9ff62afc3e06d24a4affa661d6a6cdbf3ca705af8a9170c71d"
dependencies = [
"monch",
"serde",
"thiserror",
"url",
]
[[package]]
name = "deno_task_shell"
version = "0.10.0"
@ -1312,7 +1345,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
dependencies = [
"convert_case",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"rustc_version 0.4.0",
"syn 1.0.109",
@ -1475,7 +1508,7 @@ dependencies = [
"byteorder",
"lazy_static",
"proc-macro-error",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -1562,7 +1595,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9720bba047d567ffc8a3cba48bf19126600e249ab7f128e9233e6376976a116"
dependencies = [
"heck",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -1574,7 +1607,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78b940da354ae81ef0926c5eaa428207b8f4f091d3956c891dfbd124162bed99"
dependencies = [
"pmutil",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"swc_macros_common",
"syn 1.0.109",
]
@ -1634,9 +1667,9 @@ dependencies = [
[[package]]
name = "eszip"
version = "0.38.0"
version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7291206febe844df2138e11a419ac22b6a9d02931663a9cd58a8f3989116ae5e"
checksum = "207f6568e7dde0c18eb306af104c4e7fe91f77eb99afeffd13f9c7735de4bb4d"
dependencies = [
"anyhow",
"base64 0.21.0",
@ -1738,7 +1771,7 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "479cde5eb168cf5a056dd98f311cbfab7494c216394e4fb9eba0336827a8db93"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -1786,7 +1819,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0981e470d2ab9f643df3921d54f1952ea100c39fdb6a3fdc820e20d2291df6c"
dependencies = [
"pmutil",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"swc_macros_common",
"syn 1.0.109",
]
@ -1823,9 +1856,9 @@ dependencies = [
[[package]]
name = "futures"
version = "0.3.27"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "531ac96c6ff5fd7c62263c5e3c67a603af4fcaee2e1a0ae5565ba3a11e69e549"
checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40"
dependencies = [
"futures-channel",
"futures-core",
@ -1838,9 +1871,9 @@ dependencies = [
[[package]]
name = "futures-channel"
version = "0.3.27"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "164713a5a0dcc3e7b4b1ed7d3b433cabc18025386f9339346e8daf15963cf7ac"
checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2"
dependencies = [
"futures-core",
"futures-sink",
@ -1848,15 +1881,15 @@ dependencies = [
[[package]]
name = "futures-core"
version = "0.3.27"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86d7a0c1aa76363dac491de0ee99faf6941128376f1cf96f07db7603b7de69dd"
checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c"
[[package]]
name = "futures-executor"
version = "0.3.27"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1997dd9df74cdac935c76252744c1ed5794fac083242ea4fe77ef3ed60ba0f83"
checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0"
dependencies = [
"futures-core",
"futures-task",
@ -1865,38 +1898,38 @@ dependencies = [
[[package]]
name = "futures-io"
version = "0.3.27"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89d422fa3cbe3b40dca574ab087abb5bc98258ea57eea3fd6f1fa7162c778b91"
checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964"
[[package]]
name = "futures-macro"
version = "0.3.27"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3eb14ed937631bd8b8b8977f2c198443447a8355b6e3ca599f38c975e5a963b6"
checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
"syn 2.0.13",
]
[[package]]
name = "futures-sink"
version = "0.3.27"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec93083a4aecafb2a80a885c9de1f0ccae9dbd32c2bb54b0c3a65690e0b8d2f2"
checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e"
[[package]]
name = "futures-task"
version = "0.3.27"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd65540d33b37b16542a0438c12e6aeead10d4ac5d05bd3f805b8f35ab592879"
checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65"
[[package]]
name = "futures-util"
version = "0.3.27"
version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ef6b17e481503ec85211fed8f39d1970f128935ca1f814cd32ac4a6842e84ab"
checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533"
dependencies = [
"futures-channel",
"futures-core",
@ -2324,7 +2357,7 @@ checksum = "8a7d079e129b77477a49c5c4f1cfe9ce6c2c909ef52520693e8e811a714c7b20"
dependencies = [
"Inflector",
"pmutil",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -2734,7 +2767,7 @@ dependencies = [
name = "napi_sym"
version = "0.27.0"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"serde",
"serde_json",
@ -3090,7 +3123,7 @@ dependencies = [
"phf_generator",
"phf_shared",
"proc-macro-hack",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -3119,7 +3152,7 @@ version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -3170,7 +3203,7 @@ version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3894e5d549cccbe44afecf72922f277f603cd4bb0219c8342631ef18fffbe004"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -3217,7 +3250,7 @@ version = "0.1.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"syn 1.0.109",
]
@ -3238,7 +3271,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
dependencies = [
"proc-macro-error-attr",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
"version_check",
@ -3250,7 +3283,7 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"version_check",
]
@ -3272,9 +3305,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.52"
version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d0e1ae9e836cc3beddd63db0df682593d7e2d3d891ae8c9083d2113e1744224"
checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
dependencies = [
"unicode-ident",
]
@ -3330,7 +3363,7 @@ version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
]
[[package]]
@ -3648,7 +3681,7 @@ version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "107c3d5d7f370ac09efa62a78375f94d94b8a33c61d8c278b96683fb4dbf2d8d"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -3791,9 +3824,9 @@ version = "1.0.157"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78997f4555c22a7971214540c4a661291970619afd56de19f77e0de86296e1e5"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 2.0.0",
"syn 2.0.13",
]
[[package]]
@ -3814,7 +3847,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fe39d9fbb0ebf5eb2c7cb7e2a47e4f462fad1379f1166b8ae49ad9eae89a7ca"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -4059,7 +4092,7 @@ checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988"
dependencies = [
"phf_generator",
"phf_shared",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
]
@ -4070,7 +4103,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41491e23e7db79343236a6ced96325ff132eb09e29ac4c5b8132b9c55aaaae89"
dependencies = [
"pmutil",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"swc_macros_common",
"syn 1.0.109",
@ -4180,7 +4213,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb64bc03d90fd5c90d6ab917bb2b1d7fbd31957df39e31ea24a3f554b4372251"
dependencies = [
"pmutil",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"swc_macros_common",
"syn 1.0.109",
@ -4229,7 +4262,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0159c99f81f52e48fe692ef7af1b0990b45d3006b14c6629be0b1ffee1b23aea"
dependencies = [
"pmutil",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"swc_macros_common",
"syn 1.0.109",
@ -4325,7 +4358,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebf907935ec5492256b523ae7935a824d9fdc0368dcadc41375bad0dca91cd8b"
dependencies = [
"pmutil",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"swc_macros_common",
"syn 1.0.109",
@ -4456,7 +4489,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c20468634668c2bbab581947bb8c75c97158d5a6959f4ba33df20983b20b4f6"
dependencies = [
"pmutil",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -4493,7 +4526,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4be988307882648d9bc7c71a6a73322b7520ef0211e920489a98f8391d8caa2"
dependencies = [
"pmutil",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -4516,7 +4549,7 @@ checksum = "6098b717cfd4c85f5cddec734af191dbce461c39975ed567c32ac6d0c6d61a6d"
dependencies = [
"Inflector",
"pmutil",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"swc_macros_common",
"syn 1.0.109",
@ -4539,18 +4572,18 @@ version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.0"
version = "2.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4cff13bb1732bccfe3b246f3fdb09edfd51c01d6f5299b7ccd9457c2e4e37774"
checksum = "4c9da457c5285ac1f936ebd076af6dac17a61cfe7826f2076b4d015cf47bc8ec"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"unicode-ident",
]
@ -4561,7 +4594,7 @@ version = "0.12.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
"unicode-xid 0.2.4",
@ -4663,7 +4696,7 @@ dependencies = [
"glob",
"once_cell",
"pmutil",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"regex",
"relative-path",
@ -4700,7 +4733,7 @@ version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -4762,7 +4795,7 @@ version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -4906,7 +4939,7 @@ version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ebd99eec668d0a450c177acbc4d05e0d0d13b1f8d3db13cd706c52cbec4ac04"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -4935,7 +4968,7 @@ version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
]
@ -5324,7 +5357,7 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
]
@ -5380,7 +5413,7 @@ dependencies = [
"bumpalo",
"log",
"once_cell",
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
"wasm-bindgen-shared",
@ -5414,7 +5447,7 @@ version = "0.2.84"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
"wasm-bindgen-backend",
@ -5666,7 +5699,7 @@ version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44bf07cb3e50ea2003396695d58bf46bc9887a1f362260446fad6bc4e79bd36c"
dependencies = [
"proc-macro2 1.0.52",
"proc-macro2 1.0.56",
"quote 1.0.26",
"syn 1.0.109",
"synstructure",


@ -42,12 +42,14 @@ winres.workspace = true
[dependencies]
deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "dep_graph", "module_specifier", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] }
deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }
deno_doc = "0.59.0"
deno_emit = "0.17.0"
deno_graph = "=0.45.0"
deno_doc = "0.60.0"
deno_emit = "0.18.0"
deno_graph = "=0.46.0"
deno_lint = { version = "0.43.0", features = ["docs"] }
deno_lockfile.workspace = true
deno_npm = "0.1.0"
deno_runtime = { workspace = true, features = ["dont_create_runtime_snapshot", "include_js_files_for_snapshotting"] }
deno_semver = "0.2.0"
deno_task_shell = "0.10.0"
napi_sym.workspace = true
@ -68,7 +70,7 @@ dprint-plugin-markdown = "=0.15.2"
dprint-plugin-typescript = "=0.84.0"
encoding_rs.workspace = true
env_logger = "=0.9.0"
eszip = "=0.38.0"
eszip = "=0.39.0"
fancy-regex = "=0.10.0"
flate2.workspace = true
fs3.workspace = true


@ -1,18 +1,30 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use deno_core::error::AnyError;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesOrdered;
use deno_core::futures::StreamExt;
use deno_core::parking_lot::Mutex;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::resolution::NpmResolutionSnapshotCreateOptions;
use deno_npm::resolution::NpmResolutionSnapshotCreateOptionsPackage;
use deno_npm::NpmPackageId;
use deno_semver::npm::NpmPackageReq;
use crate::args::config_file::LockConfig;
use crate::args::ConfigFile;
use crate::npm::NpmResolutionPackage;
use crate::Flags;
use super::DenoSubcommand;
pub use deno_lockfile::Lockfile;
pub use deno_lockfile::LockfileError;
use deno_lockfile::NpmPackageDependencyLockfileInfo;
use deno_lockfile::NpmPackageLockfileInfo;
pub fn discover(
flags: &Flags,
@ -61,24 +73,68 @@ pub fn discover(
Ok(Some(lockfile))
}
// NOTE(bartlomieju): we don't want a reverse mapping to be possible.
#[allow(clippy::from_over_into)]
impl Into<NpmPackageLockfileInfo> for NpmResolutionPackage {
fn into(self) -> NpmPackageLockfileInfo {
let dependencies = self
.dependencies
.into_iter()
.map(|(name, id)| NpmPackageDependencyLockfileInfo {
name,
id: id.as_serialized(),
})
.collect();
pub async fn snapshot_from_lockfile(
lockfile: Arc<Mutex<Lockfile>>,
api: &dyn NpmRegistryApi,
) -> Result<NpmResolutionSnapshot, AnyError> {
let (root_packages, mut packages) = {
let lockfile = lockfile.lock();
NpmPackageLockfileInfo {
display_id: self.pkg_id.nv.to_string(),
serialized_id: self.pkg_id.as_serialized(),
integrity: self.dist.integrity().to_string(),
dependencies,
let mut root_packages =
HashMap::<NpmPackageReq, NpmPackageId>::with_capacity(
lockfile.content.npm.specifiers.len(),
);
// collect the specifiers to version mappings
for (key, value) in &lockfile.content.npm.specifiers {
let package_req = NpmPackageReq::from_str(key)
.with_context(|| format!("Unable to parse npm specifier: {key}"))?;
let package_id = NpmPackageId::from_serialized(value)?;
root_packages.insert(package_req, package_id.clone());
}
// now fill the packages except for the dist information
let mut packages = Vec::with_capacity(lockfile.content.npm.packages.len());
for (key, package) in &lockfile.content.npm.packages {
let pkg_id = NpmPackageId::from_serialized(key)?;
// collect the dependencies
let mut dependencies = HashMap::with_capacity(package.dependencies.len());
for (name, specifier) in &package.dependencies {
let dep_id = NpmPackageId::from_serialized(specifier)?;
dependencies.insert(name.clone(), dep_id);
}
packages.push(NpmResolutionSnapshotCreateOptionsPackage {
pkg_id,
dist: Default::default(), // temporarily empty
dependencies,
});
}
(root_packages, packages)
};
// now that the lockfile is dropped, fetch the package version information
let mut version_infos =
FuturesOrdered::from_iter(packages.iter().map(|p| p.pkg_id.nv.clone()).map(
|nv| async move {
match api.package_version_info(&nv).await? {
Some(version_info) => Ok(version_info),
None => {
bail!("could not find '{}' specified in the lockfile. Maybe try again with --reload", nv);
}
}
},
));
let mut i = 0;
while let Some(version_info) = version_infos.next().await {
packages[i].dist = version_info?.dist;
i += 1;
}
NpmResolutionSnapshot::from_packages(NpmResolutionSnapshotCreateOptions {
packages,
root_packages,
})
.context("The lockfile is corrupt. You can recreate it with --lock-write")
}
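
A minimal usage sketch for the new free function above (hypothetical caller, reusing the imports at the top of this file):

async fn restore_npm_snapshot(
  lockfile: Lockfile,
  api: &dyn NpmRegistryApi,
) -> Result<NpmResolutionSnapshot, AnyError> {
  // the lockfile goes behind a mutex so the guard can be dropped before the
  // async registry lookups for the dist information begin
  let lockfile = Arc::new(Mutex::new(lockfile));
  snapshot_from_lockfile(lockfile, api).await
}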


@ -8,14 +8,14 @@ mod lockfile;
pub mod package_json;
pub use self::import_map::resolve_import_map_from_specifier;
use self::lockfile::snapshot_from_lockfile;
use self::package_json::PackageJsonDeps;
use ::import_map::ImportMap;
use deno_core::resolve_url_or_path;
use deno_graph::npm::NpmPackageReqReference;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_semver::npm::NpmPackageReqReference;
use indexmap::IndexMap;
use crate::npm::NpmRegistryApi;
use crate::npm::NpmResolutionSnapshot;
pub use config_file::BenchConfig;
pub use config_file::CompilerOptions;
pub use config_file::ConfigFile;
@ -65,6 +65,7 @@ use std::sync::Arc;
use crate::cache::DenoDir;
use crate::file_fetcher::FileFetcher;
use crate::npm::NpmProcessState;
use crate::npm::NpmRegistry;
use crate::util::fs::canonicalize_path_maybe_not_exists;
use crate::version;
@ -743,7 +744,7 @@ impl CliOptions {
pub async fn resolve_npm_resolution_snapshot(
&self,
api: &NpmRegistryApi,
api: &NpmRegistry,
) -> Result<Option<NpmResolutionSnapshot>, AnyError> {
if let Some(state) = &*NPM_PROCESS_STATE {
// TODO(bartlomieju): remove this clone
@ -753,7 +754,7 @@ impl CliOptions {
if let Some(lockfile) = self.maybe_lock_file() {
if !lockfile.lock().overwrite {
return Ok(Some(
NpmResolutionSnapshot::from_lockfile(lockfile.clone(), api)
snapshot_from_lockfile(lockfile.clone(), api)
.await
.with_context(|| {
format!(


@ -7,41 +7,18 @@ use std::path::PathBuf;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_graph::npm::NpmPackageReq;
use deno_graph::semver::NpmVersionReqSpecifierParseError;
use deno_graph::semver::VersionReq;
use deno_npm::registry::parse_dep_entry_name_and_raw_version;
use deno_npm::registry::PackageDepNpmSchemeValueParseError;
use deno_runtime::deno_node::PackageJson;
use deno_semver::npm::NpmPackageReq;
use deno_semver::npm::NpmVersionReqSpecifierParseError;
use deno_semver::VersionReq;
use thiserror::Error;
#[derive(Debug, Clone, Error, PartialEq, Eq, Hash)]
#[error("Could not find @ symbol in npm url '{value}'")]
pub struct PackageJsonDepNpmSchemeValueParseError {
pub value: String,
}
/// Gets the name and raw version constraint taking into account npm
/// package aliases.
pub fn parse_dep_entry_name_and_raw_version<'a>(
key: &'a str,
value: &'a str,
) -> Result<(&'a str, &'a str), PackageJsonDepNpmSchemeValueParseError> {
if let Some(package_and_version) = value.strip_prefix("npm:") {
if let Some((name, version)) = package_and_version.rsplit_once('@') {
Ok((name, version))
} else {
Err(PackageJsonDepNpmSchemeValueParseError {
value: value.to_string(),
})
}
} else {
Ok((key, value))
}
}
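
The helper deleted here now comes from deno_npm::registry (imported above). A sketch of a test exercising it, assuming the relocated function keeps the signature of the removed local version:

#[cfg(test)]
mod parse_dep_entry_sketch {
  use deno_npm::registry::parse_dep_entry_name_and_raw_version;

  #[test]
  fn handles_aliased_and_plain_entries() {
    // aliased entry: `"ts3": "npm:typescript@3.0.1"` yields the real name
    assert_eq!(
      parse_dep_entry_name_and_raw_version("ts3", "npm:typescript@3.0.1")
        .unwrap(),
      ("typescript", "3.0.1")
    );
    // plain entry: the key is the name, the value is the raw constraint
    assert_eq!(
      parse_dep_entry_name_and_raw_version("chalk", "^5.0.0").unwrap(),
      ("chalk", "^5.0.0")
    );
  }
}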
#[derive(Debug, Error, Clone, Hash)]
#[derive(Debug, Error, Clone)]
pub enum PackageJsonDepValueParseError {
#[error(transparent)]
SchemeValue(#[from] PackageJsonDepNpmSchemeValueParseError),
SchemeValue(#[from] PackageDepNpmSchemeValueParseError),
#[error(transparent)]
Specifier(#[from] NpmVersionReqSpecifierParseError),
#[error("Not implemented scheme '{scheme}'")]


@ -27,12 +27,12 @@ use deno_core::serde::Deserialize;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::ModuleSpecifier;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::Resolution;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_lint::rules::LintRule;
use deno_runtime::tokio_util::create_basic_runtime;
use deno_semver::npm::NpmPackageReqReference;
use log::error;
use std::collections::HashMap;
use std::sync::Arc;


@ -21,7 +21,7 @@ use crate::node;
use crate::node::node_resolve_npm_reference;
use crate::node::NodeResolution;
use crate::npm::NpmPackageResolver;
use crate::npm::NpmRegistryApi;
use crate::npm::NpmRegistry;
use crate::npm::NpmResolution;
use crate::npm::PackageJsonDepsInstaller;
use crate::resolver::CliGraphResolver;
@ -37,16 +37,17 @@ use deno_core::futures::future;
use deno_core::parking_lot::Mutex;
use deno_core::url;
use deno_core::ModuleSpecifier;
use deno_graph::npm::NpmPackageReq;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::GraphImport;
use deno_graph::Resolution;
use deno_runtime::deno_node::NodeResolutionMode;
use deno_runtime::deno_node::PackageJson;
use deno_runtime::permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageReq;
use deno_semver::npm::NpmPackageReqReference;
use indexmap::IndexMap;
use lsp::Url;
use once_cell::sync::Lazy;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;
@ -1165,7 +1166,7 @@ impl Documents {
maybe_import_map: Option<Arc<import_map::ImportMap>>,
maybe_config_file: Option<&ConfigFile>,
maybe_package_json: Option<&PackageJson>,
npm_registry_api: NpmRegistryApi,
npm_registry_api: NpmRegistry,
npm_resolution: NpmResolution,
) {
fn calculate_resolver_config_hash(
@ -1186,7 +1187,23 @@ impl Documents {
hasher.write_str(import_map.base_url().as_str());
}
hasher.write_hashable(&maybe_jsx_config);
hasher.write_hashable(&maybe_package_json_deps);
if let Some(package_json_deps) = &maybe_package_json_deps {
// We need to ensure the hashing is deterministic so explicitly type
// this in order to catch if the type of package_json_deps ever changes
// from a sorted/deterministic BTreeMap to something else.
let package_json_deps: &BTreeMap<_, _> = *package_json_deps;
for (key, value) in package_json_deps {
hasher.write_hashable(key);
match value {
Ok(value) => {
hasher.write_hashable(value);
}
Err(err) => {
hasher.write_str(&err.to_string());
}
}
}
}
hasher.finish()
}
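
The loop above hashes a BTreeMap precisely because its iteration order is the sorted key order, so identical dependency sets always produce the same config hash. A tiny self-contained illustration of that property (a sketch, not Deno code):

use std::collections::BTreeMap;
use std::hash::{Hash, Hasher};

fn digest(deps: &BTreeMap<String, String>) -> u64 {
  let mut hasher = std::collections::hash_map::DefaultHasher::new();
  // BTreeMap iterates in key order, so insertion order cannot change the hash;
  // a HashMap's iteration order would not give that guarantee
  for (key, value) in deps {
    key.hash(&mut hasher);
    value.hash(&mut hasher);
  }
  hasher.finish()
}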
@ -1847,7 +1864,7 @@ console.log(b, "hello deno");
#[test]
fn test_documents_refresh_dependencies_config_change() {
let npm_registry_api = NpmRegistryApi::new_uninitialized();
let npm_registry_api = NpmRegistry::new_uninitialized();
let npm_resolution =
NpmResolution::new(npm_registry_api.clone(), None, None);


@ -81,7 +81,7 @@ use crate::lsp::urls::LspUrlKind;
use crate::npm::create_npm_fs_resolver;
use crate::npm::NpmCache;
use crate::npm::NpmPackageResolver;
use crate::npm::NpmRegistryApi;
use crate::npm::NpmRegistry;
use crate::npm::NpmResolution;
use crate::proc_state::ProcState;
use crate::tools::fmt::format_file;
@ -145,7 +145,7 @@ pub struct Inner {
/// A lazily create "server" for handling test run requests.
maybe_testing_server: Option<testing::TestServer>,
/// Npm's registry api.
npm_api: NpmRegistryApi,
npm_api: NpmRegistry,
/// Npm cache
npm_cache: NpmCache,
/// Npm resolution that is stored in memory.
@ -417,8 +417,8 @@ impl LanguageServer {
fn create_lsp_structs(
dir: &DenoDir,
http_client: HttpClient,
) -> (NpmRegistryApi, NpmCache, NpmPackageResolver, NpmResolution) {
let registry_url = NpmRegistryApi::default_url();
) -> (NpmRegistry, NpmCache, NpmPackageResolver, NpmResolution) {
let registry_url = NpmRegistry::default_url();
let progress_bar = ProgressBar::new(ProgressBarStyle::TextOnly);
let npm_cache = NpmCache::from_deno_dir(
dir,
@ -430,7 +430,7 @@ fn create_lsp_structs(
http_client.clone(),
progress_bar.clone(),
);
let api = NpmRegistryApi::new(
let api = NpmRegistry::new(
registry_url.clone(),
npm_cache.clone(),
http_client,


@ -15,8 +15,6 @@ use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::serde_json::Value;
use deno_core::url::Url;
use deno_graph::npm::NpmPackageNv;
use deno_graph::npm::NpmPackageNvReference;
use deno_runtime::deno_node;
use deno_runtime::deno_node::errors;
use deno_runtime::deno_node::find_builtin_node_module;
@ -35,6 +33,8 @@ use deno_runtime::deno_node::RealFs;
use deno_runtime::deno_node::RequireNpmResolver;
use deno_runtime::deno_node::DEFAULT_CONDITIONS;
use deno_runtime::permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageNv;
use deno_semver::npm::NpmPackageNvReference;
use once_cell::sync::Lazy;
use regex::Regex;


@ -13,8 +13,10 @@ use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url;
use deno_graph::npm::NpmPackageNv;
use deno_graph::semver::Version;
use deno_npm::registry::NpmPackageVersionDistInfo;
use deno_npm::NpmPackageCacheFolderId;
use deno_semver::npm::NpmPackageNv;
use deno_semver::Version;
use once_cell::sync::Lazy;
use crate::args::CacheSetting;
@ -25,7 +27,6 @@ use crate::util::fs::hard_link_dir_recursive;
use crate::util::path::root_url_to_safe_local_dirname;
use crate::util::progress_bar::ProgressBar;
use super::registry::NpmPackageVersionDistInfo;
use super::tarball::verify_and_extract_tarball;
static SHOULD_SYNC_DOWNLOAD: Lazy<bool> =
@ -112,32 +113,6 @@ pub fn with_folder_sync_lock(
}
}
pub struct NpmPackageCacheFolderId {
pub nv: NpmPackageNv,
/// Peer dependency resolution may require us to have duplicate copies
/// of the same package.
pub copy_index: usize,
}
impl NpmPackageCacheFolderId {
pub fn with_no_count(&self) -> Self {
Self {
nv: self.nv.clone(),
copy_index: 0,
}
}
}
impl std::fmt::Display for NpmPackageCacheFolderId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.nv)?;
if self.copy_index > 0 {
write!(f, "_{}", self.copy_index)?;
}
Ok(())
}
}
#[derive(Clone, Debug)]
pub struct ReadonlyNpmCache {
root_dir: PathBuf,
@ -515,8 +490,8 @@ pub fn mixed_case_package_name_decode(name: &str) -> Option<String> {
#[cfg(test)]
mod test {
use deno_core::url::Url;
use deno_graph::npm::NpmPackageNv;
use deno_graph::semver::Version;
use deno_semver::npm::NpmPackageNv;
use deno_semver::Version;
use super::ReadonlyNpmCache;
use crate::npm::cache::NpmPackageCacheFolderId;


@ -4,16 +4,19 @@ use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesOrdered;
use deno_core::futures::StreamExt;
use deno_npm::registry::NpmRegistryApi;
use crate::args::package_json::PackageJsonDeps;
use super::NpmRegistryApi;
use super::NpmRegistry;
use super::NpmResolution;
#[derive(Debug)]
struct PackageJsonDepsInstallerInner {
has_installed: AtomicBool,
npm_registry_api: NpmRegistryApi,
npm_registry_api: NpmRegistry,
npm_resolution: NpmResolution,
package_deps: PackageJsonDeps,
}
@ -24,7 +27,7 @@ pub struct PackageJsonDepsInstaller(Option<Arc<PackageJsonDepsInstallerInner>>);
impl PackageJsonDepsInstaller {
pub fn new(
npm_registry_api: NpmRegistryApi,
npm_registry_api: NpmRegistry,
npm_resolution: NpmResolution,
deps: Option<PackageJsonDeps>,
) -> Self {
@ -76,17 +79,20 @@ impl PackageJsonDepsInstaller {
.collect::<Vec<_>>();
package_reqs.sort(); // deterministic resolution
inner
.npm_registry_api
.cache_in_parallel(
package_reqs.iter().map(|req| req.name.clone()).collect(),
)
.await?;
let mut req_with_infos =
FuturesOrdered::from_iter(package_reqs.into_iter().map(|req| {
let api = inner.npm_registry_api.clone();
async move {
let info = api.package_info(&req.name).await?;
Ok::<_, AnyError>((req, info))
}
}));
for package_req in package_reqs {
while let Some(result) = req_with_infos.next().await {
let (req, info) = result?;
inner
.npm_resolution
.resolve_package_req_as_pending(package_req)?;
.resolve_package_req_as_pending_with_info(req, &info)?;
}
Ok(())
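
Instead of a separate cache_in_parallel pass followed by sequential resolution, the installer now builds a FuturesOrdered so every package_info request starts immediately while results are still consumed in the deterministic, sorted request order. A self-contained sketch of that pattern (generic illustration using the futures crate, not the Deno code):

use futures::stream::{FuturesOrdered, StreamExt};

async fn fetch_all_in_order<T, F, Fut>(items: Vec<T>, fetch: F) -> Vec<Fut::Output>
where
  F: Fn(T) -> Fut,
  Fut: std::future::Future,
{
  // every future is polled concurrently, but outputs come back in the
  // order the items were submitted
  let mut pending = FuturesOrdered::from_iter(items.into_iter().map(fetch));
  let mut results = Vec::new();
  while let Some(output) = pending.next().await {
    results.push(output);
  }
  results
}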


@ -10,15 +10,8 @@ mod tarball;
pub use cache::should_sync_download;
pub use cache::NpmCache;
pub use installer::PackageJsonDepsInstaller;
#[cfg(test)]
pub use registry::NpmPackageVersionDistInfo;
pub use registry::NpmRegistryApi;
#[cfg(test)]
pub use registry::TestNpmRegistryApiInner;
pub use resolution::NpmPackageId;
pub use registry::NpmRegistry;
pub use resolution::NpmResolution;
pub use resolution::NpmResolutionPackage;
pub use resolution::NpmResolutionSnapshot;
pub use resolvers::create_npm_fs_resolver;
pub use resolvers::NpmPackageResolver;
pub use resolvers::NpmProcessState;


@ -1,7 +1,5 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::collections::HashSet;
use std::fs;
@ -10,21 +8,21 @@ use std::path::PathBuf;
use std::sync::Arc;
use async_trait::async_trait;
use deno_core::anyhow::bail;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::Context;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::futures::future::BoxFuture;
use deno_core::futures::future::Shared;
use deno_core::futures::FutureExt;
use deno_core::parking_lot::Mutex;
use deno_core::serde::Deserialize;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_graph::npm::NpmPackageNv;
use deno_graph::semver::VersionReq;
use deno_core::TaskQueue;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmRegistryApi;
use once_cell::sync::Lazy;
use serde::Serialize;
use crate::args::package_json::parse_dep_entry_name_and_raw_version;
use crate::args::CacheSetting;
use crate::cache::CACHE_PERM;
use crate::http_util::HttpClient;
@ -34,162 +32,6 @@ use crate::util::progress_bar::ProgressBar;
use super::cache::should_sync_download;
use super::cache::NpmCache;
// npm registry docs: https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md
#[derive(Debug, Default, Deserialize, Serialize, Clone)]
pub struct NpmPackageInfo {
pub name: String,
pub versions: HashMap<String, NpmPackageVersionInfo>,
#[serde(rename = "dist-tags")]
pub dist_tags: HashMap<String, String>,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum NpmDependencyEntryKind {
Dep,
Peer,
OptionalPeer,
}
impl NpmDependencyEntryKind {
pub fn is_optional(&self) -> bool {
matches!(self, NpmDependencyEntryKind::OptionalPeer)
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct NpmDependencyEntry {
pub kind: NpmDependencyEntryKind,
pub bare_specifier: String,
pub name: String,
pub version_req: VersionReq,
/// When the dependency is also marked as a peer dependency,
/// use this entry to resolve the dependency when it can't
/// be resolved as a peer dependency.
pub peer_dep_version_req: Option<VersionReq>,
}
impl PartialOrd for NpmDependencyEntry {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for NpmDependencyEntry {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
// sort the dependencies alphabetically by name then by version descending
match self.name.cmp(&other.name) {
// sort by newest to oldest
Ordering::Equal => other
.version_req
.version_text()
.cmp(self.version_req.version_text()),
ordering => ordering,
}
}
}
#[derive(Debug, Default, Deserialize, Serialize, Clone, PartialEq, Eq)]
pub struct NpmPeerDependencyMeta {
#[serde(default)]
optional: bool,
}
#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq)]
#[serde(untagged)]
pub enum NpmPackageVersionBinEntry {
String(String),
Map(HashMap<String, String>),
}
#[derive(Debug, Default, Deserialize, Serialize, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct NpmPackageVersionInfo {
pub version: String,
pub dist: NpmPackageVersionDistInfo,
pub bin: Option<NpmPackageVersionBinEntry>,
// Bare specifier to version (ex. `"typescript": "^3.0.1") or possibly
// package and version (ex. `"typescript-3.0.1": "npm:typescript@3.0.1"`).
#[serde(default)]
pub dependencies: HashMap<String, String>,
#[serde(default)]
pub peer_dependencies: HashMap<String, String>,
#[serde(default)]
pub peer_dependencies_meta: HashMap<String, NpmPeerDependencyMeta>,
}
impl NpmPackageVersionInfo {
pub fn dependencies_as_entries(
&self,
) -> Result<Vec<NpmDependencyEntry>, AnyError> {
fn parse_dep_entry(
(key, value): (&String, &String),
kind: NpmDependencyEntryKind,
) -> Result<NpmDependencyEntry, AnyError> {
let (name, version_req) =
parse_dep_entry_name_and_raw_version(key, value)?;
let version_req =
VersionReq::parse_from_npm(version_req).with_context(|| {
format!("error parsing version requirement for dependency: {key}@{version_req}")
})?;
Ok(NpmDependencyEntry {
kind,
bare_specifier: key.to_string(),
name: name.to_string(),
version_req,
peer_dep_version_req: None,
})
}
let mut result = HashMap::with_capacity(
self.dependencies.len() + self.peer_dependencies.len(),
);
for entry in &self.peer_dependencies {
let is_optional = self
.peer_dependencies_meta
.get(entry.0)
.map(|d| d.optional)
.unwrap_or(false);
let kind = match is_optional {
true => NpmDependencyEntryKind::OptionalPeer,
false => NpmDependencyEntryKind::Peer,
};
let entry = parse_dep_entry(entry, kind)?;
result.insert(entry.bare_specifier.clone(), entry);
}
for entry in &self.dependencies {
let entry = parse_dep_entry(entry, NpmDependencyEntryKind::Dep)?;
// people may define a dependency as a peer dependency as well,
// so in those cases, attempt to resolve as a peer dependency,
// but then use this dependency version requirement otherwise
if let Some(peer_dep_entry) = result.get_mut(&entry.bare_specifier) {
peer_dep_entry.peer_dep_version_req = Some(entry.version_req);
} else {
result.insert(entry.bare_specifier.clone(), entry);
}
}
Ok(result.into_values().collect())
}
}
#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct NpmPackageVersionDistInfo {
/// URL to the tarball.
pub tarball: String,
shasum: String,
integrity: Option<String>,
}
impl NpmPackageVersionDistInfo {
pub fn integrity(&self) -> Cow<String> {
self
.integrity
.as_ref()
.map(Cow::Borrowed)
.unwrap_or_else(|| Cow::Owned(format!("sha1-{}", self.shasum)))
}
}
static NPM_REGISTRY_DEFAULT_URL: Lazy<Url> = Lazy::new(|| {
let env_var_name = "NPM_CONFIG_REGISTRY";
if let Ok(registry_url) = std::env::var(env_var_name) {
@ -209,9 +51,9 @@ static NPM_REGISTRY_DEFAULT_URL: Lazy<Url> = Lazy::new(|| {
});
#[derive(Clone, Debug)]
pub struct NpmRegistryApi(Arc<dyn NpmRegistryApiInner>);
pub struct NpmRegistry(Option<Arc<NpmRegistryApiInner>>);
impl NpmRegistryApi {
impl NpmRegistry {
pub fn default_url() -> &'static Url {
&NPM_REGISTRY_DEFAULT_URL
}
@ -222,188 +64,142 @@ impl NpmRegistryApi {
http_client: HttpClient,
progress_bar: ProgressBar,
) -> Self {
Self(Arc::new(RealNpmRegistryApiInner {
Self(Some(Arc::new(NpmRegistryApiInner {
base_url,
cache,
mem_cache: Default::default(),
previously_reloaded_packages: Default::default(),
http_client,
progress_bar,
}))
})))
}
/// Creates an npm registry API that will be uninitialized
/// and error for every request. This is useful for tests
/// or for initializing the LSP.
/// Creates an npm registry API that will be uninitialized. This is
/// useful for tests or for initializing the LSP.
pub fn new_uninitialized() -> Self {
Self(Arc::new(NullNpmRegistryApiInner))
}
#[cfg(test)]
pub fn new_for_test(api: TestNpmRegistryApiInner) -> NpmRegistryApi {
Self(Arc::new(api))
}
pub async fn package_info(
&self,
name: &str,
) -> Result<Arc<NpmPackageInfo>, AnyError> {
let maybe_package_info = self.0.maybe_package_info(name).await?;
match maybe_package_info {
Some(package_info) => Ok(package_info),
None => bail!("npm package '{}' does not exist", name),
}
}
pub async fn package_version_info(
&self,
nv: &NpmPackageNv,
) -> Result<Option<NpmPackageVersionInfo>, AnyError> {
let package_info = self.package_info(&nv.name).await?;
Ok(package_info.versions.get(&nv.version.to_string()).cloned())
}
/// Caches all the package information in memory in parallel.
pub async fn cache_in_parallel(
&self,
package_names: Vec<String>,
) -> Result<(), AnyError> {
let mut unresolved_tasks = Vec::with_capacity(package_names.len());
// cache the package info up front in parallel
if should_sync_download() {
// for deterministic test output
let mut ordered_names = package_names;
ordered_names.sort();
for name in ordered_names {
self.package_info(&name).await?;
}
} else {
for name in package_names {
let api = self.clone();
unresolved_tasks.push(tokio::task::spawn(async move {
// This is ok to call because api will internally cache
// the package information in memory.
api.package_info(&name).await
}));
}
};
for result in futures::future::join_all(unresolved_tasks).await {
result??; // surface the first error
}
Ok(())
Self(None)
}
/// Clears the internal memory cache.
pub fn clear_memory_cache(&self) {
self.0.clear_memory_cache();
self.inner().clear_memory_cache();
}
pub fn get_cached_package_info(
&self,
name: &str,
) -> Option<Arc<NpmPackageInfo>> {
self.0.get_cached_package_info(name)
self.inner().get_cached_package_info(name)
}
pub fn base_url(&self) -> &Url {
self.0.base_url()
&self.inner().base_url
}
fn inner(&self) -> &Arc<NpmRegistryApiInner> {
// this panicking indicates a bug in the code where this
// wasn't initialized
self.0.as_ref().unwrap()
}
}
#[async_trait]
trait NpmRegistryApiInner: std::fmt::Debug + Sync + Send + 'static {
async fn maybe_package_info(
&self,
name: &str,
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError>;
fn clear_memory_cache(&self);
fn get_cached_package_info(&self, name: &str) -> Option<Arc<NpmPackageInfo>>;
fn base_url(&self) -> &Url;
}
static SYNC_DOWNLOAD_TASK_QUEUE: Lazy<TaskQueue> =
Lazy::new(TaskQueue::default);
#[async_trait]
impl NpmRegistryApiInner for RealNpmRegistryApiInner {
fn base_url(&self) -> &Url {
&self.base_url
}
impl NpmRegistryApi for NpmRegistry {
async fn maybe_package_info(
&self,
name: &str,
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
self.maybe_package_info(name).await
}
fn clear_memory_cache(&self) {
self.mem_cache.lock().clear();
}
fn get_cached_package_info(&self, name: &str) -> Option<Arc<NpmPackageInfo>> {
self.mem_cache.lock().get(name).cloned().flatten()
if should_sync_download() {
let inner = self.inner().clone();
SYNC_DOWNLOAD_TASK_QUEUE
.queue(async move { inner.maybe_package_info(name).await })
.await
} else {
self.inner().maybe_package_info(name).await
}
}
}
#[derive(Debug)]
struct RealNpmRegistryApiInner {
enum CacheItem {
Pending(
Shared<BoxFuture<'static, Result<Option<Arc<NpmPackageInfo>>, String>>>,
),
Resolved(Option<Arc<NpmPackageInfo>>),
}
#[derive(Debug)]
struct NpmRegistryApiInner {
base_url: Url,
cache: NpmCache,
mem_cache: Mutex<HashMap<String, Option<Arc<NpmPackageInfo>>>>,
mem_cache: Mutex<HashMap<String, CacheItem>>,
previously_reloaded_packages: Mutex<HashSet<String>>,
http_client: HttpClient,
progress_bar: ProgressBar,
}
impl RealNpmRegistryApiInner {
impl NpmRegistryApiInner {
pub async fn maybe_package_info(
&self,
self: &Arc<Self>,
name: &str,
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
let maybe_maybe_info = self.mem_cache.lock().get(name).cloned();
if let Some(maybe_info) = maybe_maybe_info {
Ok(maybe_info)
} else {
let mut maybe_package_info = None;
if self.cache.cache_setting().should_use_for_npm_package(name)
let (created, future) = {
let mut mem_cache = self.mem_cache.lock();
match mem_cache.get(name) {
Some(CacheItem::Resolved(maybe_info)) => {
return Ok(maybe_info.clone());
}
Some(CacheItem::Pending(future)) => (false, future.clone()),
None => {
if self.cache.cache_setting().should_use_for_npm_package(name)
// if this has been previously reloaded, then try loading from the
// file system cache
|| !self.previously_reloaded_packages.lock().insert(name.to_string())
{
// attempt to load from the file cache
maybe_package_info = self.load_file_cached_package_info(name);
}
{
// attempt to load from the file cache
if let Some(info) = self.load_file_cached_package_info(name) {
let result = Some(Arc::new(info));
mem_cache
.insert(name.to_string(), CacheItem::Resolved(result.clone()));
return Ok(result);
}
}
if maybe_package_info.is_none() {
maybe_package_info = self
.load_package_info_from_registry(name)
.await
.with_context(|| {
format!(
"Error getting response at {} for package \"{}\"",
self.get_package_url(name),
name
)
})?;
}
let maybe_package_info = maybe_package_info.map(Arc::new);
// Not worth the complexity to ensure multiple in-flight requests
// for the same package only request once because with how this is
// used that should never happen.
let mut mem_cache = self.mem_cache.lock();
Ok(match mem_cache.get(name) {
// another thread raced here, so use its result instead
Some(info) => info.clone(),
None => {
mem_cache.insert(name.to_string(), maybe_package_info.clone());
maybe_package_info
let future = {
let api = self.clone();
let name = name.to_string();
async move { api.load_package_info_from_registry(&name).await }
.boxed()
.shared()
};
mem_cache
.insert(name.to_string(), CacheItem::Pending(future.clone()));
(true, future)
}
})
}
};
if created {
match future.await {
Ok(maybe_info) => {
// replace the cache item to say it's resolved now
self
.mem_cache
.lock()
.insert(name.to_string(), CacheItem::Resolved(maybe_info.clone()));
Ok(maybe_info)
}
Err(err) => {
// purge the item from the cache so it loads next time
self.mem_cache.lock().remove(name);
Err(anyhow!("{}", err))
}
}
} else {
Ok(future.await.map_err(|err| anyhow!("{}", err))?)
}
}
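
The rewritten lookup replaces the old "not worth the complexity" approach with request coalescing: the first caller stores a Shared future under the package name, concurrent callers clone and await the same future, and the creating caller either marks the entry resolved or evicts it on error so a later call can retry. A stripped-down, self-contained sketch of the same pattern (simplified types and the plain futures crate, not the Deno implementation):

use std::collections::HashMap;
use std::sync::{Arc, Mutex};

use futures::future::{BoxFuture, FutureExt, Shared};

type Fetch = Shared<BoxFuture<'static, Result<Arc<String>, String>>>;

enum Entry {
  Pending(Fetch),
  Resolved(Arc<String>),
}

#[derive(Default)]
struct CoalescingCache {
  entries: Mutex<HashMap<String, Entry>>,
}

impl CoalescingCache {
  async fn get_or_fetch<F>(&self, key: &str, fetch: F) -> Result<Arc<String>, String>
  where
    F: std::future::Future<Output = Result<String, String>> + Send + 'static,
  {
    let (created, future) = {
      let mut entries = self.entries.lock().unwrap();
      match entries.get(key) {
        Some(Entry::Resolved(value)) => return Ok(value.clone()),
        Some(Entry::Pending(fut)) => (false, fut.clone()),
        None => {
          // first caller: store a cloneable (Shared) future for others to await
          let fut = fetch.map(|r| r.map(Arc::new)).boxed().shared();
          entries.insert(key.to_string(), Entry::Pending(fut.clone()));
          (true, fut)
        }
      }
    };
    let result = future.await;
    if created {
      let mut entries = self.entries.lock().unwrap();
      match &result {
        // mark resolved so later lookups skip the future entirely
        Ok(value) => {
          entries.insert(key.to_string(), Entry::Resolved(value.clone()));
        }
        // evict on failure so the next call retries instead of caching the error
        Err(_) => {
          entries.remove(key);
        }
      }
    }
    result
  }
}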
@ -478,6 +274,25 @@ impl RealNpmRegistryApiInner {
async fn load_package_info_from_registry(
&self,
name: &str,
) -> Result<Option<Arc<NpmPackageInfo>>, String> {
self
.load_package_info_from_registry_inner(name)
.await
.with_context(|| {
format!(
"Error getting response at {} for package \"{}\"",
self.get_package_url(name),
name
)
})
.map(|info| info.map(Arc::new))
// make cloneable
.map_err(|err| format!("{err:#}"))
}
async fn load_package_info_from_registry_inner(
&self,
name: &str,
) -> Result<Option<NpmPackageInfo>, AnyError> {
if *self.cache.cache_setting() == CacheSetting::Only {
return Err(custom_error(
@ -513,206 +328,20 @@ impl RealNpmRegistryApiInner {
let name_folder_path = self.cache.package_name_folder(name, &self.base_url);
name_folder_path.join("registry.json")
}
}
#[derive(Debug)]
struct NullNpmRegistryApiInner;
#[async_trait]
impl NpmRegistryApiInner for NullNpmRegistryApiInner {
async fn maybe_package_info(
&self,
_name: &str,
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
Err(deno_core::anyhow::anyhow!(
"Deno bug. Please report. Registry API was not initialized."
))
pub fn clear_memory_cache(&self) {
self.mem_cache.lock().clear();
}
fn clear_memory_cache(&self) {}
fn get_cached_package_info(
&self,
_name: &str,
) -> Option<Arc<NpmPackageInfo>> {
None
}
fn base_url(&self) -> &Url {
NpmRegistryApi::default_url()
}
}
/// Note: This test struct is not thread safe for setup
/// purposes. Construct everything on the same thread.
#[cfg(test)]
#[derive(Clone, Default, Debug)]
pub struct TestNpmRegistryApiInner {
package_infos: Arc<Mutex<HashMap<String, NpmPackageInfo>>>,
}
#[cfg(test)]
impl TestNpmRegistryApiInner {
pub fn add_package_info(&self, name: &str, info: NpmPackageInfo) {
let previous = self.package_infos.lock().insert(name.to_string(), info);
assert!(previous.is_none());
}
pub fn ensure_package(&self, name: &str) {
if !self.package_infos.lock().contains_key(name) {
self.add_package_info(
name,
NpmPackageInfo {
name: name.to_string(),
..Default::default()
},
);
}
}
pub fn ensure_package_version(&self, name: &str, version: &str) {
self.ensure_package(name);
let mut infos = self.package_infos.lock();
let info = infos.get_mut(name).unwrap();
if !info.versions.contains_key(version) {
info.versions.insert(
version.to_string(),
NpmPackageVersionInfo {
version: version.to_string(),
..Default::default()
},
);
}
}
pub fn add_dependency(
&self,
package_from: (&str, &str),
package_to: (&str, &str),
) {
let mut infos = self.package_infos.lock();
let info = infos.get_mut(package_from.0).unwrap();
let version = info.versions.get_mut(package_from.1).unwrap();
version
.dependencies
.insert(package_to.0.to_string(), package_to.1.to_string());
}
pub fn add_dist_tag(&self, package_name: &str, tag: &str, version: &str) {
let mut infos = self.package_infos.lock();
let info = infos.get_mut(package_name).unwrap();
info.dist_tags.insert(tag.to_string(), version.to_string());
}
pub fn add_peer_dependency(
&self,
package_from: (&str, &str),
package_to: (&str, &str),
) {
let mut infos = self.package_infos.lock();
let info = infos.get_mut(package_from.0).unwrap();
let version = info.versions.get_mut(package_from.1).unwrap();
version
.peer_dependencies
.insert(package_to.0.to_string(), package_to.1.to_string());
}
pub fn add_optional_peer_dependency(
&self,
package_from: (&str, &str),
package_to: (&str, &str),
) {
let mut infos = self.package_infos.lock();
let info = infos.get_mut(package_from.0).unwrap();
let version = info.versions.get_mut(package_from.1).unwrap();
version
.peer_dependencies
.insert(package_to.0.to_string(), package_to.1.to_string());
version.peer_dependencies_meta.insert(
package_to.0.to_string(),
NpmPeerDependencyMeta { optional: true },
);
}
}
#[cfg(test)]
#[async_trait]
impl NpmRegistryApiInner for TestNpmRegistryApiInner {
async fn maybe_package_info(
pub fn get_cached_package_info(
&self,
name: &str,
) -> Result<Option<Arc<NpmPackageInfo>>, AnyError> {
let result = self.package_infos.lock().get(name).cloned();
Ok(result.map(Arc::new))
}
fn clear_memory_cache(&self) {
// do nothing for the test api
}
fn get_cached_package_info(
&self,
_name: &str,
) -> Option<Arc<NpmPackageInfo>> {
None
}
fn base_url(&self) -> &Url {
NpmRegistryApi::default_url()
}
}
#[cfg(test)]
mod test {
use std::collections::HashMap;
use deno_core::serde_json;
use crate::npm::registry::NpmPackageVersionBinEntry;
use crate::npm::NpmPackageVersionDistInfo;
use super::NpmPackageVersionInfo;
#[test]
fn deserializes_minimal_pkg_info() {
let text = r#"{ "version": "1.0.0", "dist": { "tarball": "value", "shasum": "test" } }"#;
let info: NpmPackageVersionInfo = serde_json::from_str(text).unwrap();
assert_eq!(
info,
NpmPackageVersionInfo {
version: "1.0.0".to_string(),
dist: NpmPackageVersionDistInfo {
tarball: "value".to_string(),
shasum: "test".to_string(),
integrity: None,
},
bin: None,
dependencies: Default::default(),
peer_dependencies: Default::default(),
peer_dependencies_meta: Default::default()
}
);
}
#[test]
fn deserializes_bin_entry() {
// string
let text = r#"{ "version": "1.0.0", "bin": "bin-value", "dist": { "tarball": "value", "shasum": "test" } }"#;
let info: NpmPackageVersionInfo = serde_json::from_str(text).unwrap();
assert_eq!(
info.bin,
Some(NpmPackageVersionBinEntry::String("bin-value".to_string()))
);
// map
let text = r#"{ "version": "1.0.0", "bin": { "a": "a-value", "b": "b-value" }, "dist": { "tarball": "value", "shasum": "test" } }"#;
let info: NpmPackageVersionInfo = serde_json::from_str(text).unwrap();
assert_eq!(
info.bin,
Some(NpmPackageVersionBinEntry::Map(HashMap::from([
("a".to_string(), "a-value".to_string()),
("b".to_string(), "b-value".to_string()),
])))
);
let mem_cache = self.mem_cache.lock();
if let Some(CacheItem::Resolved(maybe_info)) = mem_cache.get(name) {
maybe_info.clone()
} else {
None
}
}
}

cli/npm/resolution.rs (new file)

@ -0,0 +1,312 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::collections::HashSet;
use std::sync::Arc;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::RwLock;
use deno_core::TaskQueue;
use deno_lockfile::NpmPackageDependencyLockfileInfo;
use deno_lockfile::NpmPackageLockfileInfo;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::resolution::NpmPackagesPartitioned;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_semver::npm::NpmPackageNv;
use deno_semver::npm::NpmPackageNvReference;
use deno_semver::npm::NpmPackageReq;
use deno_semver::npm::NpmPackageReqReference;
use crate::args::Lockfile;
use super::registry::NpmRegistry;
/// Handles updating and storing npm resolution in memory where the underlying
/// snapshot can be updated concurrently. Additionally handles updating the lockfile
/// based on changes to the resolution.
///
/// This does not interact with the file system.
#[derive(Clone)]
pub struct NpmResolution(Arc<NpmResolutionInner>);
struct NpmResolutionInner {
api: NpmRegistry,
snapshot: RwLock<NpmResolutionSnapshot>,
update_queue: TaskQueue,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
}
impl std::fmt::Debug for NpmResolution {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let snapshot = self.0.snapshot.read();
f.debug_struct("NpmResolution")
.field("snapshot", &snapshot)
.finish()
}
}
impl NpmResolution {
pub fn new(
api: NpmRegistry,
initial_snapshot: Option<NpmResolutionSnapshot>,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
) -> Self {
Self(Arc::new(NpmResolutionInner {
api,
snapshot: RwLock::new(initial_snapshot.unwrap_or_default()),
update_queue: Default::default(),
maybe_lockfile,
}))
}
pub async fn add_package_reqs(
&self,
package_reqs: Vec<NpmPackageReq>,
) -> Result<(), AnyError> {
let inner = &self.0;
// only allow one thread in here at a time
let _permit = inner.update_queue.acquire().await;
let snapshot = inner.snapshot.read().clone();
let snapshot = add_package_reqs_to_snapshot(
&inner.api,
package_reqs,
snapshot,
self.0.maybe_lockfile.clone(),
)
.await?;
*inner.snapshot.write() = snapshot;
Ok(())
}
pub async fn set_package_reqs(
&self,
package_reqs: Vec<NpmPackageReq>,
) -> Result<(), AnyError> {
let inner = &self.0;
// only allow one thread in here at a time
let _permit = inner.update_queue.acquire().await;
let snapshot = inner.snapshot.read().clone();
let reqs_set = package_reqs.iter().collect::<HashSet<_>>();
let has_removed_package = !snapshot
.package_reqs()
.keys()
.all(|req| reqs_set.contains(req));
// if any packages were removed, we need to completely recreate the npm resolution snapshot
let snapshot = if has_removed_package {
NpmResolutionSnapshot::default()
} else {
snapshot
};
let snapshot = add_package_reqs_to_snapshot(
&inner.api,
package_reqs,
snapshot,
self.0.maybe_lockfile.clone(),
)
.await?;
*inner.snapshot.write() = snapshot;
Ok(())
}
pub async fn resolve_pending(&self) -> Result<(), AnyError> {
let inner = &self.0;
// only allow one thread in here at a time
let _permit = inner.update_queue.acquire().await;
let snapshot = inner.snapshot.read().clone();
let snapshot = add_package_reqs_to_snapshot(
&inner.api,
Vec::new(),
snapshot,
self.0.maybe_lockfile.clone(),
)
.await?;
*inner.snapshot.write() = snapshot;
Ok(())
}
pub fn pkg_req_ref_to_nv_ref(
&self,
req_ref: NpmPackageReqReference,
) -> Result<NpmPackageNvReference, AnyError> {
let node_id = self.resolve_pkg_id_from_pkg_req(&req_ref.req)?;
Ok(NpmPackageNvReference {
nv: node_id.nv,
sub_path: req_ref.sub_path,
})
}
pub fn resolve_package_cache_folder_id_from_id(
&self,
id: &NpmPackageId,
) -> Option<NpmPackageCacheFolderId> {
self
.0
.snapshot
.read()
.package_from_id(id)
.map(|p| p.get_package_cache_folder_id())
}
pub fn resolve_package_from_package(
&self,
name: &str,
referrer: &NpmPackageCacheFolderId,
) -> Result<NpmResolutionPackage, AnyError> {
self
.0
.snapshot
.read()
.resolve_package_from_package(name, referrer)
.cloned()
}
/// Resolve a node package from a deno module.
pub fn resolve_pkg_id_from_pkg_req(
&self,
req: &NpmPackageReq,
) -> Result<NpmPackageId, AnyError> {
self
.0
.snapshot
.read()
.resolve_pkg_from_pkg_req(req)
.map(|pkg| pkg.pkg_id.clone())
}
pub fn resolve_pkg_id_from_deno_module(
&self,
id: &NpmPackageNv,
) -> Result<NpmPackageId, AnyError> {
self
.0
.snapshot
.read()
.resolve_package_from_deno_module(id)
.map(|pkg| pkg.pkg_id.clone())
}
/// Resolves a package requirement for deno graph. This should only be
/// called by deno_graph's NpmResolver or for resolving packages in
/// a package.json
pub fn resolve_package_req_as_pending(
&self,
pkg_req: &NpmPackageReq,
) -> Result<NpmPackageNv, AnyError> {
// we should always have this because it should have been cached before here
let package_info =
self.0.api.get_cached_package_info(&pkg_req.name).unwrap();
self.resolve_package_req_as_pending_with_info(pkg_req, &package_info)
}
/// Resolves a package requirement for deno graph. This should only be
/// called by deno_graph's NpmResolver or for resolving packages in
/// a package.json
pub fn resolve_package_req_as_pending_with_info(
&self,
pkg_req: &NpmPackageReq,
package_info: &NpmPackageInfo,
) -> Result<NpmPackageNv, AnyError> {
debug_assert_eq!(pkg_req.name, package_info.name);
let inner = &self.0;
let mut snapshot = inner.snapshot.write();
let nv = snapshot.resolve_package_req_as_pending(pkg_req, package_info)?;
Ok(nv)
}
pub fn all_packages_partitioned(&self) -> NpmPackagesPartitioned {
self.0.snapshot.read().all_packages_partitioned()
}
pub fn has_packages(&self) -> bool {
!self.0.snapshot.read().is_empty()
}
pub fn snapshot(&self) -> NpmResolutionSnapshot {
self.0.snapshot.read().clone()
}
pub fn lock(&self, lockfile: &mut Lockfile) -> Result<(), AnyError> {
let snapshot = self.0.snapshot.read();
populate_lockfile_from_snapshot(lockfile, &snapshot)
}
}
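// Illustrative sketch (not part of this change): `add_package_reqs`,
// `set_package_reqs`, and `resolve_pending` above all follow the same
// pattern: take the update-queue permit so only one updater runs at a time,
// clone the current snapshot, compute a replacement, and publish it under
// the write lock. A simplified std-only version of that pattern, with a
// Mutex guard standing in for the async TaskQueue permit and a Vec<String>
// standing in for NpmResolutionSnapshot:
struct SketchResolution {
  update_queue: std::sync::Mutex<()>,
  snapshot: std::sync::RwLock<Vec<String>>,
}
impl SketchResolution {
  fn add_reqs(&self, reqs: Vec<String>) {
    // only allow one updater in here at a time
    let _permit = self.update_queue.lock().unwrap();
    // work on a clone so readers are never blocked while we compute
    let mut next = self.snapshot.read().unwrap().clone();
    next.extend(reqs);
    // publish the new snapshot in one short write-lock section
    *self.snapshot.write().unwrap() = next;
  }
}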
async fn add_package_reqs_to_snapshot(
api: &NpmRegistry,
package_reqs: Vec<NpmPackageReq>,
snapshot: NpmResolutionSnapshot,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
) -> Result<NpmResolutionSnapshot, AnyError> {
if !snapshot.has_pending()
&& package_reqs
.iter()
.all(|req| snapshot.package_reqs().contains_key(req))
{
return Ok(snapshot); // already up to date
}
let result = snapshot.resolve_pending(package_reqs, api).await;
api.clear_memory_cache();
let snapshot = result?; // propagate the error after clearing the memory cache
if let Some(lockfile_mutex) = maybe_lockfile {
let mut lockfile = lockfile_mutex.lock();
populate_lockfile_from_snapshot(&mut lockfile, &snapshot)?;
Ok(snapshot)
} else {
Ok(snapshot)
}
}
fn populate_lockfile_from_snapshot(
lockfile: &mut Lockfile,
snapshot: &NpmResolutionSnapshot,
) -> Result<(), AnyError> {
for (package_req, nv) in snapshot.package_reqs() {
lockfile.insert_npm_specifier(
package_req.to_string(),
snapshot
.resolve_package_from_deno_module(nv)
.unwrap()
.pkg_id
.as_serialized(),
);
}
for package in snapshot.all_packages() {
lockfile
.check_or_insert_npm_package(npm_package_to_lockfile_info(package))?;
}
Ok(())
}
fn npm_package_to_lockfile_info(
pkg: NpmResolutionPackage,
) -> NpmPackageLockfileInfo {
let dependencies = pkg
.dependencies
.into_iter()
.map(|(name, id)| NpmPackageDependencyLockfileInfo {
name,
id: id.as_serialized(),
})
.collect();
NpmPackageLockfileInfo {
display_id: pkg.pkg_id.nv.to_string(),
serialized_id: pkg.pkg_id.as_serialized(),
integrity: pkg.dist.integrity().to_string(),
dependencies,
}
}


@@ -1,243 +0,0 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_graph::semver::Version;
use deno_graph::semver::VersionReq;
use once_cell::sync::Lazy;
use super::NpmPackageId;
use crate::npm::registry::NpmPackageInfo;
use crate::npm::registry::NpmPackageVersionInfo;
pub static LATEST_VERSION_REQ: Lazy<VersionReq> =
Lazy::new(|| VersionReq::parse_from_specifier("latest").unwrap());
pub fn resolve_best_package_version_and_info<'info, 'version>(
version_req: &VersionReq,
package_info: &'info NpmPackageInfo,
existing_versions: impl Iterator<Item = &'version Version>,
) -> Result<VersionAndInfo<'info>, AnyError> {
if let Some(version) = resolve_best_from_existing_versions(
version_req,
package_info,
existing_versions,
)? {
match package_info.versions.get(&version.to_string()) {
Some(version_info) => Ok(VersionAndInfo {
version,
info: version_info,
}),
None => {
bail!(
"could not find version '{}' for '{}'",
version,
&package_info.name
)
}
}
} else {
// get the information
get_resolved_package_version_and_info(version_req, package_info, None)
}
}
#[derive(Clone)]
pub struct VersionAndInfo<'a> {
pub version: Version,
pub info: &'a NpmPackageVersionInfo,
}
fn get_resolved_package_version_and_info<'a>(
version_req: &VersionReq,
info: &'a NpmPackageInfo,
parent: Option<&NpmPackageId>,
) -> Result<VersionAndInfo<'a>, AnyError> {
if let Some(tag) = version_req.tag() {
tag_to_version_info(info, tag, parent)
} else {
let mut maybe_best_version: Option<VersionAndInfo> = None;
for version_info in info.versions.values() {
let version = Version::parse_from_npm(&version_info.version)?;
if version_req.matches(&version) {
let is_best_version = maybe_best_version
.as_ref()
.map(|best_version| best_version.version.cmp(&version).is_lt())
.unwrap_or(true);
if is_best_version {
maybe_best_version = Some(VersionAndInfo {
version,
info: version_info,
});
}
}
}
match maybe_best_version {
Some(v) => Ok(v),
// If the package isn't found, it likely means the user needs to run with
// `--reload` to fetch the latest npm package information. Although it seems
// like we could be smart and fetch the latest information for this package
// here, we really need a full restart. Otherwise subtle bugs could occur:
// something earlier may have already resolved this package's version using
// the old information, and fetching new information only at this point could
// cause the same requirement to resolve to a different version on the next
// run.
None => bail!(
concat!(
"Could not find npm package '{}' matching {}{}. ",
"Try retrieving the latest npm package information by running with --reload",
),
info.name,
version_req.version_text(),
match parent {
Some(resolved_id) => format!(" as specified in {}", resolved_id.nv),
None => String::new(),
}
),
}
}
}
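// Illustrative sketch (not part of this file): the selection loop above is a
// plain "highest matching version" scan; every candidate version is tested
// against the requirement and the greatest match wins. Reduced to std types,
// with (u64, u64, u64) tuples standing in for semver versions and a closure
// standing in for VersionReq::matches:
fn best_matching_version(
  candidates: &[(u64, u64, u64)],
  matches: impl Fn(&(u64, u64, u64)) -> bool,
) -> Option<(u64, u64, u64)> {
  let mut best: Option<(u64, u64, u64)> = None;
  for v in candidates {
    // keep the candidate only if it satisfies the requirement and beats
    // the best match found so far
    if matches(v) && best.map(|b| b < *v).unwrap_or(true) {
      best = Some(*v);
    }
  }
  best
}
// e.g. best_matching_version(&[(1, 0, 0), (1, 2, 3), (2, 0, 0)], |v| v.0 == 1)
// returns Some((1, 2, 3)).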
pub fn version_req_satisfies(
version_req: &VersionReq,
version: &Version,
package_info: &NpmPackageInfo,
parent: Option<&NpmPackageId>,
) -> Result<bool, AnyError> {
match version_req.tag() {
Some(tag) => {
let tag_version = tag_to_version_info(package_info, tag, parent)?.version;
Ok(tag_version == *version)
}
None => Ok(version_req.matches(version)),
}
}
fn resolve_best_from_existing_versions<'a>(
version_req: &VersionReq,
package_info: &NpmPackageInfo,
existing_versions: impl Iterator<Item = &'a Version>,
) -> Result<Option<Version>, AnyError> {
let mut maybe_best_version: Option<&Version> = None;
for version in existing_versions {
if version_req_satisfies(version_req, version, package_info, None)? {
let is_best_version = maybe_best_version
.as_ref()
.map(|best_version| (*best_version).cmp(version).is_lt())
.unwrap_or(true);
if is_best_version {
maybe_best_version = Some(version);
}
}
}
Ok(maybe_best_version.cloned())
}
fn tag_to_version_info<'a>(
info: &'a NpmPackageInfo,
tag: &str,
parent: Option<&NpmPackageId>,
) -> Result<VersionAndInfo<'a>, AnyError> {
// For when someone just specifies @types/node, we want to pull in a
// "known good" version of @types/node that works well with Deno and
// not necessarily the latest version. For example, we might only be
// compatible with Node vX, but then Node vY is published so we wouldn't
// want to pull that in.
// Note: If the user doesn't want this behavior, then they can specify an
// explicit version.
if tag == "latest" && info.name == "@types/node" {
return get_resolved_package_version_and_info(
// WARNING: When bumping this version, check if anything needs to be
// updated in the `setNodeOnlyGlobalNames` call in 99_main_compiler.js
&VersionReq::parse_from_npm("18.0.0 - 18.11.18").unwrap(),
info,
parent,
);
}
if let Some(version) = info.dist_tags.get(tag) {
match info.versions.get(version) {
Some(info) => Ok(VersionAndInfo {
version: Version::parse_from_npm(version)?,
info,
}),
None => {
bail!(
"Could not find version '{}' referenced in dist-tag '{}'.",
version,
tag,
)
}
}
} else {
bail!("Could not find dist-tag '{}'.", tag)
}
}
#[cfg(test)]
mod test {
use std::collections::HashMap;
use deno_graph::npm::NpmPackageReqReference;
use super::*;
#[test]
fn test_get_resolved_package_version_and_info() {
// dist tag where version doesn't exist
let package_ref = NpmPackageReqReference::from_str("npm:test").unwrap();
let package_info = NpmPackageInfo {
name: "test".to_string(),
versions: HashMap::new(),
dist_tags: HashMap::from([(
"latest".to_string(),
"1.0.0-alpha".to_string(),
)]),
};
let result = get_resolved_package_version_and_info(
package_ref
.req
.version_req
.as_ref()
.unwrap_or(&*LATEST_VERSION_REQ),
&package_info,
None,
);
assert_eq!(
result.err().unwrap().to_string(),
"Could not find version '1.0.0-alpha' referenced in dist-tag 'latest'."
);
// dist tag where version is a pre-release
let package_ref = NpmPackageReqReference::from_str("npm:test").unwrap();
let package_info = NpmPackageInfo {
name: "test".to_string(),
versions: HashMap::from([
("0.1.0".to_string(), NpmPackageVersionInfo::default()),
(
"1.0.0-alpha".to_string(),
NpmPackageVersionInfo {
version: "0.1.0-alpha".to_string(),
..Default::default()
},
),
]),
dist_tags: HashMap::from([(
"latest".to_string(),
"1.0.0-alpha".to_string(),
)]),
};
let result = get_resolved_package_version_and_info(
package_ref
.req
.version_req
.as_ref()
.unwrap_or(&*LATEST_VERSION_REQ),
&package_info,
None,
);
assert_eq!(result.unwrap().version.to_string(), "1.0.0-alpha");
}
}

File diff suppressed because it is too large


@@ -1,636 +0,0 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::RwLock;
use deno_core::TaskQueue;
use deno_graph::npm::NpmPackageNv;
use deno_graph::npm::NpmPackageNvReference;
use deno_graph::npm::NpmPackageReq;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::semver::Version;
use log::debug;
use serde::Deserialize;
use serde::Serialize;
use thiserror::Error;
use crate::args::Lockfile;
use crate::npm::resolution::common::LATEST_VERSION_REQ;
use self::common::resolve_best_package_version_and_info;
use self::graph::GraphDependencyResolver;
use self::snapshot::NpmPackagesPartitioned;
use super::cache::NpmPackageCacheFolderId;
use super::registry::NpmPackageVersionDistInfo;
use super::registry::NpmRegistryApi;
mod common;
mod graph;
mod snapshot;
use graph::Graph;
pub use snapshot::NpmResolutionSnapshot;
#[derive(Debug, Error)]
#[error("Invalid npm package id '{text}'. {message}")]
pub struct NpmPackageNodeIdDeserializationError {
message: String,
text: String,
}
/// A resolved unique identifier for an npm package. This contains
/// the resolved name, version, and peer dependency resolution identifiers.
#[derive(Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct NpmPackageId {
pub nv: NpmPackageNv,
pub peer_dependencies: Vec<NpmPackageId>,
}
// Custom debug implementation for more concise test output
impl std::fmt::Debug for NpmPackageId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.as_serialized())
}
}
impl NpmPackageId {
pub fn as_serialized(&self) -> String {
self.as_serialized_with_level(0)
}
fn as_serialized_with_level(&self, level: usize) -> String {
// WARNING: This should not change because it's used in the lockfile
let mut result = format!(
"{}@{}",
if level == 0 {
self.nv.name.to_string()
} else {
self.nv.name.replace('/', "+")
},
self.nv.version
);
for peer in &self.peer_dependencies {
// unfortunately we can't do something like `_3` when
// this gets deep because npm package names can start
// with a number
result.push_str(&"_".repeat(level + 1));
result.push_str(&peer.as_serialized_with_level(level + 1));
}
result
}
pub fn from_serialized(
id: &str,
) -> Result<Self, NpmPackageNodeIdDeserializationError> {
use monch::*;
fn parse_name(input: &str) -> ParseResult<&str> {
if_not_empty(substring(move |input| {
for (pos, c) in input.char_indices() {
// first character might be a scope, so skip it
if pos > 0 && c == '@' {
return Ok((&input[pos..], ()));
}
}
ParseError::backtrace()
}))(input)
}
fn parse_version(input: &str) -> ParseResult<&str> {
if_not_empty(substring(skip_while(|c| c != '_')))(input)
}
fn parse_name_and_version(input: &str) -> ParseResult<(String, Version)> {
let (input, name) = parse_name(input)?;
let (input, _) = ch('@')(input)?;
let at_version_input = input;
let (input, version) = parse_version(input)?;
match Version::parse_from_npm(version) {
Ok(version) => Ok((input, (name.to_string(), version))),
Err(err) => ParseError::fail(at_version_input, format!("{err:#}")),
}
}
fn parse_level_at_level<'a>(
level: usize,
) -> impl Fn(&'a str) -> ParseResult<'a, ()> {
fn parse_level(input: &str) -> ParseResult<usize> {
let level = input.chars().take_while(|c| *c == '_').count();
Ok((&input[level..], level))
}
move |input| {
let (input, parsed_level) = parse_level(input)?;
if parsed_level == level {
Ok((input, ()))
} else {
ParseError::backtrace()
}
}
}
fn parse_peers_at_level<'a>(
level: usize,
) -> impl Fn(&'a str) -> ParseResult<'a, Vec<NpmPackageId>> {
move |mut input| {
let mut peers = Vec::new();
while let Ok((level_input, _)) = parse_level_at_level(level)(input) {
input = level_input;
let peer_result = parse_id_at_level(level)(input)?;
input = peer_result.0;
peers.push(peer_result.1);
}
Ok((input, peers))
}
}
fn parse_id_at_level<'a>(
level: usize,
) -> impl Fn(&'a str) -> ParseResult<'a, NpmPackageId> {
move |input| {
let (input, (name, version)) = parse_name_and_version(input)?;
let name = if level > 0 {
name.replace('+', "/")
} else {
name
};
let (input, peer_dependencies) =
parse_peers_at_level(level + 1)(input)?;
Ok((
input,
NpmPackageId {
nv: NpmPackageNv { name, version },
peer_dependencies,
},
))
}
}
with_failure_handling(parse_id_at_level(0))(id).map_err(|err| {
NpmPackageNodeIdDeserializationError {
message: format!("{err:#}"),
text: id.to_string(),
}
})
}
}
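// Illustrative sketch (not part of this file): `as_serialized_with_level`
// nests peer dependencies by prefixing each one with `level + 1` underscores
// and replaces `/` in scoped names with `+` below the top level. A minimal
// std-only re-derivation of the scheme, with a hypothetical `SketchId`
// struct standing in for NpmPackageId:
struct SketchId {
  name: &'static str,
  version: &'static str,
  peers: Vec<SketchId>,
}
fn serialize_sketch(id: &SketchId, level: usize) -> String {
  let name = if level == 0 {
    id.name.to_string()
  } else {
    id.name.replace('/', "+")
  };
  let mut out = format!("{}@{}", name, id.version);
  for peer in &id.peers {
    // one extra underscore per nesting level
    out.push_str(&"_".repeat(level + 1));
    out.push_str(&serialize_sketch(peer, level + 1));
  }
  out
}
// For pkg-a@1.2.3 with peer pkg-b@3.2.1, which itself has peer pkg-c@1.3.2,
// this yields "pkg-a@1.2.3_pkg-b@3.2.1__pkg-c@1.3.2", matching the format
// exercised by the `serialize_npm_package_id` test below.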
impl Ord for NpmPackageId {
fn cmp(&self, other: &Self) -> Ordering {
match self.nv.cmp(&other.nv) {
Ordering::Equal => self.peer_dependencies.cmp(&other.peer_dependencies),
ordering => ordering,
}
}
}
impl PartialOrd for NpmPackageId {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
#[derive(Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct NpmResolutionPackage {
pub pkg_id: NpmPackageId,
/// The peer dependency resolution can differ for the same
/// package (name and version) depending on where it is in
/// the resolution tree. This copy index indicates which
/// copy of the package this is.
pub copy_index: usize,
pub dist: NpmPackageVersionDistInfo,
/// The key is the name this package uses to refer to the other package,
/// which may differ from that package's actual name.
pub dependencies: HashMap<String, NpmPackageId>,
}
impl std::fmt::Debug for NpmResolutionPackage {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// custom debug implementation for deterministic output in the tests
f.debug_struct("NpmResolutionPackage")
.field("pkg_id", &self.pkg_id)
.field("copy_index", &self.copy_index)
.field("dist", &self.dist)
.field(
"dependencies",
&self.dependencies.iter().collect::<BTreeMap<_, _>>(),
)
.finish()
}
}
impl NpmResolutionPackage {
pub fn get_package_cache_folder_id(&self) -> NpmPackageCacheFolderId {
NpmPackageCacheFolderId {
nv: self.pkg_id.nv.clone(),
copy_index: self.copy_index,
}
}
}
/// Handles updating and storing npm resolution in memory.
///
/// This does not interact with the file system.
#[derive(Clone)]
pub struct NpmResolution(Arc<NpmResolutionInner>);
struct NpmResolutionInner {
api: NpmRegistryApi,
snapshot: RwLock<NpmResolutionSnapshot>,
update_queue: TaskQueue,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
}
impl std::fmt::Debug for NpmResolution {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let snapshot = self.0.snapshot.read();
f.debug_struct("NpmResolution")
.field("snapshot", &snapshot)
.finish()
}
}
impl NpmResolution {
pub fn new(
api: NpmRegistryApi,
initial_snapshot: Option<NpmResolutionSnapshot>,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
) -> Self {
Self(Arc::new(NpmResolutionInner {
api,
snapshot: RwLock::new(initial_snapshot.unwrap_or_default()),
update_queue: Default::default(),
maybe_lockfile,
}))
}
pub async fn add_package_reqs(
&self,
package_reqs: Vec<NpmPackageReq>,
) -> Result<(), AnyError> {
let inner = &self.0;
// only allow one thread in here at a time
let _permit = inner.update_queue.acquire().await;
let snapshot = inner.snapshot.read().clone();
let snapshot = add_package_reqs_to_snapshot(
&inner.api,
package_reqs,
snapshot,
self.0.maybe_lockfile.clone(),
)
.await?;
*inner.snapshot.write() = snapshot;
Ok(())
}
pub async fn set_package_reqs(
&self,
package_reqs: Vec<NpmPackageReq>,
) -> Result<(), AnyError> {
let inner = &self.0;
// only allow one thread in here at a time
let _permit = inner.update_queue.acquire().await;
let snapshot = inner.snapshot.read().clone();
let reqs_set = package_reqs.iter().collect::<HashSet<_>>();
let has_removed_package = !snapshot
.package_reqs
.keys()
.all(|req| reqs_set.contains(req));
// if any packages were removed, we need to completely recreate the npm resolution snapshot
let snapshot = if has_removed_package {
NpmResolutionSnapshot::default()
} else {
snapshot
};
let snapshot = add_package_reqs_to_snapshot(
&inner.api,
package_reqs,
snapshot,
self.0.maybe_lockfile.clone(),
)
.await?;
*inner.snapshot.write() = snapshot;
Ok(())
}
pub async fn resolve_pending(&self) -> Result<(), AnyError> {
let inner = &self.0;
// only allow one thread in here at a time
let _permit = inner.update_queue.acquire().await;
let snapshot = inner.snapshot.read().clone();
let snapshot = add_package_reqs_to_snapshot(
&inner.api,
Vec::new(),
snapshot,
self.0.maybe_lockfile.clone(),
)
.await?;
*inner.snapshot.write() = snapshot;
Ok(())
}
pub fn pkg_req_ref_to_nv_ref(
&self,
req_ref: NpmPackageReqReference,
) -> Result<NpmPackageNvReference, AnyError> {
let node_id = self.resolve_pkg_id_from_pkg_req(&req_ref.req)?;
Ok(NpmPackageNvReference {
nv: node_id.nv,
sub_path: req_ref.sub_path,
})
}
pub fn resolve_package_cache_folder_id_from_id(
&self,
id: &NpmPackageId,
) -> Option<NpmPackageCacheFolderId> {
self
.0
.snapshot
.read()
.package_from_id(id)
.map(|p| p.get_package_cache_folder_id())
}
pub fn resolve_package_from_package(
&self,
name: &str,
referrer: &NpmPackageCacheFolderId,
) -> Result<NpmResolutionPackage, AnyError> {
self
.0
.snapshot
.read()
.resolve_package_from_package(name, referrer)
.cloned()
}
/// Resolve a node package from a deno module.
pub fn resolve_pkg_id_from_pkg_req(
&self,
req: &NpmPackageReq,
) -> Result<NpmPackageId, AnyError> {
self
.0
.snapshot
.read()
.resolve_pkg_from_pkg_req(req)
.map(|pkg| pkg.pkg_id.clone())
}
pub fn resolve_pkg_id_from_deno_module(
&self,
id: &NpmPackageNv,
) -> Result<NpmPackageId, AnyError> {
self
.0
.snapshot
.read()
.resolve_package_from_deno_module(id)
.map(|pkg| pkg.pkg_id.clone())
}
/// Resolves a package requirement for deno graph. This should only be
/// called by deno_graph's NpmResolver or for resolving packages in
/// a package.json
pub fn resolve_package_req_as_pending(
&self,
pkg_req: &NpmPackageReq,
) -> Result<NpmPackageNv, AnyError> {
let inner = &self.0;
// we should always have this because it should have been cached before here
let package_info =
inner.api.get_cached_package_info(&pkg_req.name).unwrap();
let mut snapshot = inner.snapshot.write();
let version_req =
pkg_req.version_req.as_ref().unwrap_or(&*LATEST_VERSION_REQ);
let version_and_info =
match snapshot.packages_by_name.get(&package_info.name) {
Some(existing_versions) => resolve_best_package_version_and_info(
version_req,
&package_info,
existing_versions.iter().map(|p| &p.nv.version),
)?,
None => resolve_best_package_version_and_info(
version_req,
&package_info,
Vec::new().iter(),
)?,
};
let id = NpmPackageNv {
name: package_info.name.to_string(),
version: version_and_info.version,
};
debug!(
"Resolved {}@{} to {}",
pkg_req.name,
version_req.version_text(),
id.to_string(),
);
snapshot.package_reqs.insert(pkg_req.clone(), id.clone());
let packages_with_name = snapshot
.packages_by_name
.entry(package_info.name.clone())
.or_default();
if !packages_with_name.iter().any(|p| p.nv == id) {
packages_with_name.push(NpmPackageId {
nv: id.clone(),
peer_dependencies: Vec::new(),
});
}
snapshot.pending_unresolved_packages.push(id.clone());
Ok(id)
}
pub fn all_packages_partitioned(&self) -> NpmPackagesPartitioned {
self.0.snapshot.read().all_packages_partitioned()
}
pub fn has_packages(&self) -> bool {
!self.0.snapshot.read().packages.is_empty()
}
pub fn snapshot(&self) -> NpmResolutionSnapshot {
self.0.snapshot.read().clone()
}
pub fn lock(&self, lockfile: &mut Lockfile) -> Result<(), AnyError> {
let snapshot = self.0.snapshot.read();
for (package_req, nv) in snapshot.package_reqs.iter() {
lockfile.insert_npm_specifier(
package_req.to_string(),
snapshot.root_packages.get(nv).unwrap().as_serialized(),
);
}
for package in snapshot.all_packages() {
lockfile.check_or_insert_npm_package(package.into())?;
}
Ok(())
}
}
async fn add_package_reqs_to_snapshot(
api: &NpmRegistryApi,
package_reqs: Vec<NpmPackageReq>,
snapshot: NpmResolutionSnapshot,
maybe_lockfile: Option<Arc<Mutex<Lockfile>>>,
) -> Result<NpmResolutionSnapshot, AnyError> {
if snapshot.pending_unresolved_packages.is_empty()
&& package_reqs
.iter()
.all(|req| snapshot.package_reqs.contains_key(req))
{
return Ok(snapshot); // already up to date
}
// convert the snapshot to a traversable graph
let mut graph = Graph::from_snapshot(snapshot).with_context(|| {
deno_core::anyhow::anyhow!(
"Failed creating npm state. Try recreating your lockfile."
)
})?;
let pending_unresolved = graph.take_pending_unresolved();
// avoid loading the info if this is already in the graph
let package_reqs = package_reqs
.into_iter()
.filter(|r| !graph.has_package_req(r))
.collect::<Vec<_>>();
let pending_unresolved = pending_unresolved
.into_iter()
.filter(|p| !graph.has_root_package(p))
.collect::<Vec<_>>();
// cache the packages in parallel
api
.cache_in_parallel(
package_reqs
.iter()
.map(|req| req.name.clone())
.chain(pending_unresolved.iter().map(|id| id.name.clone()))
.collect::<HashSet<_>>()
.into_iter()
.collect::<Vec<_>>(),
)
.await?;
// go over the top level package names first (npm package reqs and pending unresolved),
// then down the tree one level at a time through all the branches
let mut resolver = GraphDependencyResolver::new(&mut graph, api);
// The package reqs and ids should already be sorted
// in the order they should be resolved in.
for package_req in package_reqs {
let info = api.package_info(&package_req.name).await?;
resolver.add_package_req(&package_req, &info)?;
}
for pkg_id in pending_unresolved {
let info = api.package_info(&pkg_id.name).await?;
resolver.add_root_package(&pkg_id, &info)?;
}
resolver.resolve_pending().await?;
let result = graph.into_snapshot(api).await;
api.clear_memory_cache();
if let Some(lockfile_mutex) = maybe_lockfile {
let mut lockfile = lockfile_mutex.lock();
match result {
Ok(snapshot) => {
for (package_req, nv) in snapshot.package_reqs.iter() {
lockfile.insert_npm_specifier(
package_req.to_string(),
snapshot.root_packages.get(nv).unwrap().as_serialized(),
);
}
for package in snapshot.all_packages() {
lockfile.check_or_insert_npm_package(package.into())?;
}
Ok(snapshot)
}
Err(err) => Err(err),
}
} else {
result
}
}
#[cfg(test)]
mod test {
use deno_graph::npm::NpmPackageNv;
use deno_graph::semver::Version;
use super::NpmPackageId;
#[test]
fn serialize_npm_package_id() {
let id = NpmPackageId {
nv: NpmPackageNv {
name: "pkg-a".to_string(),
version: Version::parse_from_npm("1.2.3").unwrap(),
},
peer_dependencies: vec![
NpmPackageId {
nv: NpmPackageNv {
name: "pkg-b".to_string(),
version: Version::parse_from_npm("3.2.1").unwrap(),
},
peer_dependencies: vec![
NpmPackageId {
nv: NpmPackageNv {
name: "pkg-c".to_string(),
version: Version::parse_from_npm("1.3.2").unwrap(),
},
peer_dependencies: vec![],
},
NpmPackageId {
nv: NpmPackageNv {
name: "pkg-d".to_string(),
version: Version::parse_from_npm("2.3.4").unwrap(),
},
peer_dependencies: vec![],
},
],
},
NpmPackageId {
nv: NpmPackageNv {
name: "pkg-e".to_string(),
version: Version::parse_from_npm("2.3.1").unwrap(),
},
peer_dependencies: vec![NpmPackageId {
nv: NpmPackageNv {
name: "pkg-f".to_string(),
version: Version::parse_from_npm("2.3.1").unwrap(),
},
peer_dependencies: vec![],
}],
},
],
};
// this shouldn't change because it's used in the lockfile
let serialized = id.as_serialized();
assert_eq!(serialized, "pkg-a@1.2.3_pkg-b@3.2.1__pkg-c@1.3.2__pkg-d@2.3.4_pkg-e@2.3.1__pkg-f@2.3.1");
assert_eq!(NpmPackageId::from_serialized(&serialized).unwrap(), id);
}
}


@@ -1,492 +0,0 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::Arc;
use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_graph::npm::NpmPackageNv;
use deno_graph::npm::NpmPackageReq;
use deno_graph::semver::VersionReq;
use serde::Deserialize;
use serde::Serialize;
use crate::args::Lockfile;
use crate::npm::cache::NpmPackageCacheFolderId;
use crate::npm::registry::NpmPackageVersionDistInfo;
use crate::npm::registry::NpmRegistryApi;
use super::NpmPackageId;
use super::NpmResolutionPackage;
/// Packages partitioned by whether or not they are "copy" packages.
pub struct NpmPackagesPartitioned {
pub packages: Vec<NpmResolutionPackage>,
/// Since peer dependency resolution occurs based on ancestors and ancestor
/// siblings, this may sometimes cause the same package (name and version)
/// to have different dependencies based on where it appears in the tree.
/// For these packages, we create a "copy package": a duplicate of the package
/// whose dependencies are the ones resolved for its position in the tree.
pub copy_packages: Vec<NpmResolutionPackage>,
}
impl NpmPackagesPartitioned {
pub fn into_all(self) -> Vec<NpmResolutionPackage> {
let mut packages = self.packages;
packages.extend(self.copy_packages);
packages
}
}
#[derive(Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct NpmResolutionSnapshot {
/// The unique package requirements map to a single npm package name and version.
#[serde(with = "map_to_vec")]
pub(super) package_reqs: HashMap<NpmPackageReq, NpmPackageNv>,
// Each root level npm package name and version maps to an exact npm package node id.
#[serde(with = "map_to_vec")]
pub(super) root_packages: HashMap<NpmPackageNv, NpmPackageId>,
pub(super) packages_by_name: HashMap<String, Vec<NpmPackageId>>,
#[serde(with = "map_to_vec")]
pub(super) packages: HashMap<NpmPackageId, NpmResolutionPackage>,
/// Packages, in resolution order, whose dependencies have not yet been
/// resolved
pub(super) pending_unresolved_packages: Vec<NpmPackageNv>,
}
impl std::fmt::Debug for NpmResolutionSnapshot {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// do a custom debug implementation that creates deterministic output for the tests
f.debug_struct("NpmResolutionSnapshot")
.field(
"package_reqs",
&self.package_reqs.iter().collect::<BTreeMap<_, _>>(),
)
.field(
"root_packages",
&self.root_packages.iter().collect::<BTreeMap<_, _>>(),
)
.field(
"packages_by_name",
&self.packages_by_name.iter().collect::<BTreeMap<_, _>>(),
)
.field(
"packages",
&self.packages.iter().collect::<BTreeMap<_, _>>(),
)
.field(
"pending_unresolved_packages",
&self.pending_unresolved_packages,
)
.finish()
}
}
// This is done so the maps with non-string keys get serialized and deserialized as vectors.
// Adapted from: https://github.com/serde-rs/serde/issues/936#issuecomment-302281792
mod map_to_vec {
use std::collections::HashMap;
use serde::de::Deserialize;
use serde::de::Deserializer;
use serde::ser::Serializer;
use serde::Serialize;
pub fn serialize<S, K: Serialize, V: Serialize>(
map: &HashMap<K, V>,
serializer: S,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.collect_seq(map.iter())
}
pub fn deserialize<
'de,
D,
K: Deserialize<'de> + Eq + std::hash::Hash,
V: Deserialize<'de>,
>(
deserializer: D,
) -> Result<HashMap<K, V>, D::Error>
where
D: Deserializer<'de>,
{
let mut map = HashMap::new();
for (key, value) in Vec::<(K, V)>::deserialize(deserializer)? {
map.insert(key, value);
}
Ok(map)
}
}
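// Illustrative sketch (not part of this file): `map_to_vec` exists because
// JSON object keys must be strings, so maps keyed by structs are written as
// a sequence of [key, value] pairs instead of as an object. A hypothetical
// field using the helper, with a (u32, u32) key standing in for the real key
// types, would serialize like this:
#[derive(Serialize, Deserialize)]
struct SketchEntry {
  #[serde(with = "map_to_vec")]
  by_id: HashMap<(u32, u32), String>,
}
// serde_json renders `by_id` as {"by_id": [[[1, 0], "a"]]} rather than as a
// nested JSON object, and `map_to_vec::deserialize` rebuilds the HashMap
// from that pair list.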
impl NpmResolutionSnapshot {
/// Gets if this snapshot is empty.
pub fn is_empty(&self) -> bool {
self.packages.is_empty() && self.pending_unresolved_packages.is_empty()
}
/// Resolve a package from a package requirement.
pub fn resolve_pkg_from_pkg_req(
&self,
req: &NpmPackageReq,
) -> Result<&NpmResolutionPackage, AnyError> {
match self.package_reqs.get(req) {
Some(id) => self.resolve_package_from_deno_module(id),
None => bail!("could not find npm package directory for '{}'", req),
}
}
/// Resolve a package from a deno module.
pub fn resolve_package_from_deno_module(
&self,
id: &NpmPackageNv,
) -> Result<&NpmResolutionPackage, AnyError> {
match self.root_packages.get(id) {
Some(id) => Ok(self.packages.get(id).unwrap()),
None => bail!("could not find npm package directory for '{}'", id),
}
}
pub fn top_level_packages(&self) -> Vec<NpmPackageId> {
self.root_packages.values().cloned().collect::<Vec<_>>()
}
pub fn package_from_id(
&self,
id: &NpmPackageId,
) -> Option<&NpmResolutionPackage> {
self.packages.get(id)
}
pub fn resolve_package_from_package(
&self,
name: &str,
referrer: &NpmPackageCacheFolderId,
) -> Result<&NpmResolutionPackage, AnyError> {
// todo(dsherret): do we need an additional hashmap to get this quickly?
let referrer_package = self
.packages_by_name
.get(&referrer.nv.name)
.and_then(|packages| {
packages
.iter()
.filter(|p| p.nv.version == referrer.nv.version)
.filter_map(|node_id| {
let package = self.packages.get(node_id)?;
if package.copy_index == referrer.copy_index {
Some(package)
} else {
None
}
})
.next()
})
.ok_or_else(|| {
anyhow!("could not find referrer npm package '{}'", referrer)
})?;
let name = name_without_path(name);
if let Some(id) = referrer_package.dependencies.get(name) {
return Ok(self.packages.get(id).unwrap());
}
if referrer_package.pkg_id.nv.name == name {
return Ok(referrer_package);
}
// TODO(bartlomieju): this should use a reverse lookup table in the
// snapshot instead of resolving best version again.
let any_version_req = VersionReq::parse_from_npm("*").unwrap();
if let Some(id) = self.resolve_best_package_id(name, &any_version_req) {
if let Some(pkg) = self.packages.get(&id) {
return Ok(pkg);
}
}
bail!(
"could not find npm package '{}' referenced by '{}'",
name,
referrer
)
}
pub fn all_packages(&self) -> Vec<NpmResolutionPackage> {
self.packages.values().cloned().collect()
}
pub fn all_packages_partitioned(&self) -> NpmPackagesPartitioned {
let mut packages = self.all_packages();
let mut copy_packages = Vec::with_capacity(packages.len() / 2); // at most 1 copy for every package
// partition out any packages that are "copy" packages
for i in (0..packages.len()).rev() {
if packages[i].copy_index > 0 {
copy_packages.push(packages.swap_remove(i));
}
}
NpmPackagesPartitioned {
packages,
copy_packages,
}
}
pub fn resolve_best_package_id(
&self,
name: &str,
version_req: &VersionReq,
) -> Option<NpmPackageId> {
// todo(dsherret): this is not exactly correct because some ids
// will be better than others due to peer dependencies
let mut maybe_best_id: Option<&NpmPackageId> = None;
if let Some(node_ids) = self.packages_by_name.get(name) {
for node_id in node_ids.iter() {
if version_req.matches(&node_id.nv.version) {
let is_best_version = maybe_best_id
.as_ref()
.map(|best_id| best_id.nv.version.cmp(&node_id.nv.version).is_lt())
.unwrap_or(true);
if is_best_version {
maybe_best_id = Some(node_id);
}
}
}
}
maybe_best_id.cloned()
}
pub async fn from_lockfile(
lockfile: Arc<Mutex<Lockfile>>,
api: &NpmRegistryApi,
) -> Result<Self, AnyError> {
let mut package_reqs: HashMap<NpmPackageReq, NpmPackageNv>;
let mut root_packages: HashMap<NpmPackageNv, NpmPackageId>;
let mut packages_by_name: HashMap<String, Vec<NpmPackageId>>;
let mut packages: HashMap<NpmPackageId, NpmResolutionPackage>;
let mut copy_index_resolver: SnapshotPackageCopyIndexResolver;
{
let lockfile = lockfile.lock();
// pre-allocate collections
package_reqs =
HashMap::with_capacity(lockfile.content.npm.specifiers.len());
root_packages =
HashMap::with_capacity(lockfile.content.npm.specifiers.len());
let packages_len = lockfile.content.npm.packages.len();
packages = HashMap::with_capacity(packages_len);
packages_by_name = HashMap::with_capacity(packages_len); // close enough
copy_index_resolver =
SnapshotPackageCopyIndexResolver::with_capacity(packages_len);
let mut verify_ids = HashSet::with_capacity(packages_len);
// collect the specifiers to version mappings
for (key, value) in &lockfile.content.npm.specifiers {
let package_req = NpmPackageReq::from_str(key)
.with_context(|| format!("Unable to parse npm specifier: {key}"))?;
let package_id = NpmPackageId::from_serialized(value)?;
package_reqs.insert(package_req, package_id.nv.clone());
root_packages.insert(package_id.nv.clone(), package_id.clone());
verify_ids.insert(package_id.clone());
}
// then the packages
for (key, value) in &lockfile.content.npm.packages {
let package_id = NpmPackageId::from_serialized(key)?;
// collect the dependencies
let mut dependencies = HashMap::default();
packages_by_name
.entry(package_id.nv.name.to_string())
.or_default()
.push(package_id.clone());
for (name, specifier) in &value.dependencies {
let dep_id = NpmPackageId::from_serialized(specifier)?;
dependencies.insert(name.to_string(), dep_id.clone());
verify_ids.insert(dep_id);
}
let package = NpmResolutionPackage {
pkg_id: package_id.clone(),
copy_index: copy_index_resolver.resolve(&package_id),
// temporary dummy value
dist: NpmPackageVersionDistInfo::default(),
dependencies,
};
packages.insert(package_id, package);
}
// verify that all these ids exist in packages
for id in &verify_ids {
if !packages.contains_key(id) {
bail!(
"the lockfile is corrupt. You can recreate it with --lock-write"
);
}
}
}
api
.cache_in_parallel(packages_by_name.keys().cloned().collect())
.await?;
// ensure the dist is set for each package
for package in packages.values_mut() {
// this will read from the memory cache now
let version_info = match api
.package_version_info(&package.pkg_id.nv)
.await?
{
Some(version_info) => version_info,
None => {
bail!("could not find '{}' specified in the lockfile. Maybe try again with --reload", package.pkg_id.nv);
}
};
package.dist = version_info.dist;
}
Ok(Self {
package_reqs,
root_packages,
packages_by_name,
packages,
pending_unresolved_packages: Default::default(),
})
}
}
pub struct SnapshotPackageCopyIndexResolver {
packages_to_copy_index: HashMap<NpmPackageId, usize>,
package_name_version_to_copy_count: HashMap<NpmPackageNv, usize>,
}
impl SnapshotPackageCopyIndexResolver {
pub fn with_capacity(capacity: usize) -> Self {
Self {
packages_to_copy_index: HashMap::with_capacity(capacity),
package_name_version_to_copy_count: HashMap::with_capacity(capacity), // close enough
}
}
pub fn from_map_with_capacity(
mut packages_to_copy_index: HashMap<NpmPackageId, usize>,
capacity: usize,
) -> Self {
let mut package_name_version_to_copy_count =
HashMap::with_capacity(capacity); // close enough
if capacity > packages_to_copy_index.len() {
packages_to_copy_index.reserve(capacity - packages_to_copy_index.len());
}
for (node_id, index) in &packages_to_copy_index {
let entry = package_name_version_to_copy_count
.entry(node_id.nv.clone())
.or_insert(0);
if *entry < *index {
*entry = *index;
}
}
Self {
packages_to_copy_index,
package_name_version_to_copy_count,
}
}
pub fn resolve(&mut self, node_id: &NpmPackageId) -> usize {
if let Some(index) = self.packages_to_copy_index.get(node_id) {
*index
} else {
let index = *self
.package_name_version_to_copy_count
.entry(node_id.nv.clone())
.and_modify(|count| {
*count += 1;
})
.or_insert(0);
self.packages_to_copy_index.insert(node_id.clone(), index);
index
}
}
}
fn name_without_path(name: &str) -> &str {
let mut search_start_index = 0;
if name.starts_with('@') {
if let Some(slash_index) = name.find('/') {
search_start_index = slash_index + 1;
}
}
if let Some(slash_index) = &name[search_start_index..].find('/') {
// get the name up until the path slash
&name[0..search_start_index + slash_index]
} else {
name
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_name_without_path() {
assert_eq!(name_without_path("foo"), "foo");
assert_eq!(name_without_path("@foo/bar"), "@foo/bar");
assert_eq!(name_without_path("@foo/bar/baz"), "@foo/bar");
assert_eq!(name_without_path("@hello"), "@hello");
}
#[test]
fn test_copy_index_resolver() {
let mut copy_index_resolver =
SnapshotPackageCopyIndexResolver::with_capacity(10);
assert_eq!(
copy_index_resolver
.resolve(&NpmPackageId::from_serialized("package@1.0.0").unwrap()),
0
);
assert_eq!(
copy_index_resolver
.resolve(&NpmPackageId::from_serialized("package@1.0.0").unwrap()),
0
);
assert_eq!(
copy_index_resolver.resolve(
&NpmPackageId::from_serialized("package@1.0.0_package-b@1.0.0")
.unwrap()
),
1
);
assert_eq!(
copy_index_resolver.resolve(
&NpmPackageId::from_serialized(
"package@1.0.0_package-b@1.0.0__package-c@2.0.0"
)
.unwrap()
),
2
);
assert_eq!(
copy_index_resolver.resolve(
&NpmPackageId::from_serialized("package@1.0.0_package-b@1.0.0")
.unwrap()
),
1
);
assert_eq!(
copy_index_resolver
.resolve(&NpmPackageId::from_serialized("package-b@1.0.0").unwrap()),
0
);
}
}


@@ -9,13 +9,13 @@ use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::url::Url;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NodeResolutionMode;
use crate::npm::cache::should_sync_download;
use crate::npm::NpmCache;
use crate::npm::NpmPackageId;
use crate::npm::NpmResolutionPackage;
/// Part of the resolution that interacts with the file system.
#[async_trait]


@@ -9,15 +9,15 @@ use async_trait::async_trait;
use deno_ast::ModuleSpecifier;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NodeResolutionMode;
use crate::npm::cache::NpmPackageCacheFolderId;
use crate::npm::resolution::NpmResolution;
use crate::npm::resolvers::common::cache_packages;
use crate::npm::NpmCache;
use crate::npm::NpmPackageId;
use crate::npm::NpmResolutionPackage;
use super::common::ensure_registry_read_permission;
use super::common::types_package_name;


@@ -19,6 +19,9 @@ use deno_core::anyhow::bail;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::url::Url;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmPackageCacheFolderId;
use deno_npm::NpmPackageId;
use deno_runtime::deno_core::futures;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NodeResolutionMode;
@@ -27,11 +30,8 @@ use tokio::task::JoinHandle;
use crate::npm::cache::mixed_case_package_name_encode;
use crate::npm::cache::should_sync_download;
use crate::npm::cache::NpmPackageCacheFolderId;
use crate::npm::resolution::NpmResolution;
use crate::npm::resolution::NpmResolutionSnapshot;
use crate::npm::NpmCache;
use crate::npm::NpmPackageId;
use crate::util::fs::copy_dir_recursive;
use crate::util::fs::hard_link_dir_recursive;
@@ -386,12 +386,7 @@ async fn sync_resolution_with_fs(
// node_modules/.deno/<package_id>/node_modules/<package_name>
let mut found_names = HashSet::new();
let mut pending_packages = VecDeque::new();
pending_packages.extend(
snapshot
.top_level_packages()
.into_iter()
.map(|id| (id, true)),
);
pending_packages.extend(snapshot.top_level_packages().map(|id| (id, true)));
while let Some((id, is_top_level)) = pending_packages.pop_front() {
let root_folder_name = if found_names.insert(id.nv.name.clone()) {
id.nv.name.clone()
@@ -400,7 +395,7 @@ async fn sync_resolution_with_fs(
} else {
continue; // skip, already handled
};
let package = snapshot.package_from_id(&id).unwrap();
let package = snapshot.package_from_id(id).unwrap();
let local_registry_package_path = join_package_name(
&deno_local_registry_dir
.join(get_package_folder_id_folder_name(
@@ -415,7 +410,7 @@ async fn sync_resolution_with_fs(
&join_package_name(root_node_modules_dir_path, &root_folder_name),
)?;
for id in package.dependencies.values() {
pending_packages.push_back((id.clone(), false));
pending_packages.push_back((id, false));
}
}


@@ -4,26 +4,29 @@ mod common;
mod global;
mod local;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use deno_ast::ModuleSpecifier;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_graph::npm::NpmPackageNv;
use deno_graph::npm::NpmPackageNvReference;
use deno_graph::npm::NpmPackageReq;
use deno_graph::npm::NpmPackageReqReference;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_runtime::deno_node::NodePermissions;
use deno_runtime::deno_node::NodeResolutionMode;
use deno_runtime::deno_node::PathClean;
use deno_runtime::deno_node::RequireNpmResolver;
use deno_semver::npm::NpmPackageNv;
use deno_semver::npm::NpmPackageNvReference;
use deno_semver::npm::NpmPackageReq;
use deno_semver::npm::NpmPackageReqReference;
use global::GlobalNpmPackageResolver;
use serde::Deserialize;
use serde::Serialize;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use crate::args::Lockfile;
use crate::util::fs::canonicalize_path_maybe_not_exists;
@@ -33,8 +36,6 @@ use self::common::NpmPackageFsResolver;
use self::local::LocalNpmPackageResolver;
use super::resolution::NpmResolution;
use super::NpmCache;
use super::NpmPackageId;
use super::NpmResolutionSnapshot;
/// State provided to the process via an environment variable.
#[derive(Clone, Debug, Serialize, Deserialize)]


@@ -7,13 +7,13 @@ use std::path::PathBuf;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_graph::npm::NpmPackageNv;
use deno_npm::registry::NpmPackageVersionDistInfo;
use deno_semver::npm::NpmPackageNv;
use flate2::read::GzDecoder;
use tar::Archive;
use tar::EntryType;
use super::cache::with_folder_sync_lock;
use super::registry::NpmPackageVersionDistInfo;
pub fn verify_and_extract_tarball(
package: &NpmPackageNv,
@@ -116,7 +116,7 @@ fn extract_tarball(data: &[u8], output_folder: &Path) -> Result<(), AnyError> {
#[cfg(test)]
mod test {
use deno_graph::semver::Version;
use deno_semver::Version;
use super::*;


@@ -28,7 +28,7 @@ use crate::node::NodeResolution;
use crate::npm::create_npm_fs_resolver;
use crate::npm::NpmCache;
use crate::npm::NpmPackageResolver;
use crate::npm::NpmRegistryApi;
use crate::npm::NpmRegistry;
use crate::npm::NpmResolution;
use crate::npm::PackageJsonDepsInstaller;
use crate::resolver::CliGraphResolver;
@@ -47,7 +47,6 @@ use deno_core::resolve_url_or_path;
use deno_core::CompiledWasmModuleStore;
use deno_core::ModuleSpecifier;
use deno_core::SharedArrayBufferStore;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::source::Loader;
use deno_graph::source::Resolver;
use deno_graph::Module;
@@ -59,6 +58,7 @@ use deno_runtime::deno_tls::rustls::RootCertStore;
use deno_runtime::deno_web::BlobStore;
use deno_runtime::inspector_server::InspectorServer;
use deno_runtime::permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageReqReference;
use import_map::ImportMap;
use log::warn;
use std::borrow::Cow;
@@ -95,7 +95,7 @@ pub struct Inner {
pub resolver: Arc<CliGraphResolver>,
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
pub node_analysis_cache: NodeAnalysisCache,
pub npm_api: NpmRegistryApi,
pub npm_api: NpmRegistry,
pub npm_cache: NpmCache,
pub npm_resolver: NpmPackageResolver,
pub npm_resolution: NpmResolution,
@@ -233,14 +233,14 @@ impl ProcState {
let lockfile = cli_options.maybe_lock_file();
let npm_registry_url = NpmRegistryApi::default_url().to_owned();
let npm_registry_url = NpmRegistry::default_url().to_owned();
let npm_cache = NpmCache::from_deno_dir(
&dir,
cli_options.cache_setting(),
http_client.clone(),
progress_bar.clone(),
);
let npm_api = NpmRegistryApi::new(
let npm_api = NpmRegistry::new(
npm_registry_url.clone(),
npm_cache.clone(),
http_client.clone(),


@@ -8,19 +8,20 @@ use deno_core::futures::future::LocalBoxFuture;
use deno_core::futures::FutureExt;
use deno_core::ModuleSpecifier;
use deno_core::TaskQueue;
use deno_graph::npm::NpmPackageNv;
use deno_graph::npm::NpmPackageReq;
use deno_graph::source::NpmResolver;
use deno_graph::source::Resolver;
use deno_graph::source::UnknownBuiltInNodeModuleError;
use deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE;
use deno_npm::registry::NpmRegistryApi;
use deno_runtime::deno_node::is_builtin_node_module;
use deno_semver::npm::NpmPackageNv;
use deno_semver::npm::NpmPackageReq;
use import_map::ImportMap;
use std::sync::Arc;
use crate::args::package_json::PackageJsonDeps;
use crate::args::JsxImportSourceConfig;
use crate::npm::NpmRegistryApi;
use crate::npm::NpmRegistry;
use crate::npm::NpmResolution;
use crate::npm::PackageJsonDepsInstaller;
@@ -32,7 +33,7 @@ pub struct CliGraphResolver {
maybe_default_jsx_import_source: Option<String>,
maybe_jsx_import_source_module: Option<String>,
no_npm: bool,
npm_registry_api: NpmRegistryApi,
npm_registry_api: NpmRegistry,
npm_resolution: NpmResolution,
package_json_deps_installer: PackageJsonDepsInstaller,
sync_download_queue: Option<Arc<TaskQueue>>,
@@ -42,7 +43,7 @@ impl Default for CliGraphResolver {
fn default() -> Self {
// This is not ideal, but necessary for the LSP. In the future, we should
// refactor the LSP and force this to be initialized.
let npm_registry_api = NpmRegistryApi::new_uninitialized();
let npm_registry_api = NpmRegistry::new_uninitialized();
let npm_resolution =
NpmResolution::new(npm_registry_api.clone(), None, None);
Self {
@@ -63,7 +64,7 @@ impl CliGraphResolver {
maybe_jsx_import_source_config: Option<JsxImportSourceConfig>,
maybe_import_map: Option<Arc<ImportMap>>,
no_npm: bool,
npm_registry_api: NpmRegistryApi,
npm_registry_api: NpmRegistry,
npm_resolution: NpmResolution,
package_json_deps_installer: PackageJsonDepsInstaller,
) -> Self {


@@ -187,7 +187,7 @@ impl ModuleLoader for EmbeddedModuleLoader {
}
let module = module?;
let code = module.source().await;
let code = module.source().await.unwrap_or_default();
let code = std::str::from_utf8(&code)
.map_err(|_| type_error("Module source is not utf-8"))?
.to_owned()


@@ -1119,7 +1119,13 @@ fn lock_file_missing_top_level_package() {
let stderr = String::from_utf8(output.stderr).unwrap();
assert_eq!(
stderr,
"error: failed reading lockfile 'deno.lock'\n\nCaused by:\n the lockfile is corrupt. You can recreate it with --lock-write\n"
concat!(
"error: failed reading lockfile 'deno.lock'\n",
"\n",
"Caused by:\n",
" 0: The lockfile is corrupt. You can recreate it with --lock-write\n",
" 1: Could not find referenced package 'cowsay@1.5.0' in the list of packages.\n"
)
);
}


@@ -1,2 +1,2 @@
Download http://localhost:4545/npm/registry/mkdirp
error: Could not find npm package 'mkdirp' matching 0.5.125. Try retrieving the latest npm package information by running with --reload
error: Could not find npm package 'mkdirp' matching '0.5.125'. Try retrieving the latest npm package information by running with --reload


@@ -10,25 +10,25 @@ use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_graph::npm::NpmPackageNv;
use deno_graph::npm::NpmPackageNvReference;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::Dependency;
use deno_graph::Module;
use deno_graph::ModuleError;
use deno_graph::ModuleGraph;
use deno_graph::ModuleGraphError;
use deno_graph::Resolution;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_runtime::colors;
use deno_semver::npm::NpmPackageNv;
use deno_semver::npm::NpmPackageNvReference;
use deno_semver::npm::NpmPackageReqReference;
use crate::args::Flags;
use crate::args::InfoFlags;
use crate::display;
use crate::graph_util::graph_lock_or_exit;
use crate::npm::NpmPackageId;
use crate::npm::NpmPackageResolver;
use crate::npm::NpmResolutionPackage;
use crate::npm::NpmResolutionSnapshot;
use crate::proc_state::ProcState;
use crate::util::checksum;


@@ -15,7 +15,7 @@ use deno_core::error::generic_error;
use deno_core::error::AnyError;
use deno_core::resolve_url_or_path;
use deno_core::url::Url;
use deno_graph::npm::NpmPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use log::Level;
use once_cell::sync::Lazy;
use regex::Regex;


@@ -18,10 +18,10 @@ use deno_core::futures::StreamExt;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use deno_core::LocalInspectorSession;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::source::Resolver;
use deno_runtime::deno_node;
use deno_runtime::worker::MainWorker;
use deno_semver::npm::NpmPackageReqReference;
use once_cell::sync::Lazy;
use super::cdp;


@@ -10,7 +10,7 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures;
use deno_core::futures::future::LocalBoxFuture;
use deno_graph::npm::NpmPackageNv;
use deno_semver::npm::NpmPackageNv;
use deno_task_shell::ExecuteResult;
use deno_task_shell::ShellCommand;
use deno_task_shell::ShellCommandContext;


@@ -17,7 +17,7 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::future::BoxFuture;
use deno_core::futures::FutureExt;
use deno_graph::semver::Version;
use deno_semver::Version;
use once_cell::sync::Lazy;
use std::borrow::Cow;
use std::env;


@@ -20,7 +20,7 @@ use deno_graph::ModuleGraph;
use import_map::ImportMap;
use crate::cache::ParsedSourceCache;
use crate::npm::NpmRegistryApi;
use crate::npm::NpmRegistry;
use crate::npm::NpmResolution;
use crate::npm::PackageJsonDepsInstaller;
use crate::resolver::CliGraphResolver;
@@ -264,7 +264,7 @@ async fn build_test_graph(
analyzer: &dyn deno_graph::ModuleAnalyzer,
) -> ModuleGraph {
let resolver = original_import_map.map(|m| {
let npm_registry_api = NpmRegistryApi::new_uninitialized();
let npm_registry_api = NpmRegistry::new_uninitialized();
let npm_resolution =
NpmResolution::new(npm_registry_api.clone(), None, None);
let deps_installer = PackageJsonDepsInstaller::new(


@@ -31,13 +31,13 @@ use deno_core::ModuleSpecifier;
use deno_core::OpState;
use deno_core::RuntimeOptions;
use deno_core::Snapshot;
use deno_graph::npm::NpmPackageNvReference;
use deno_graph::npm::NpmPackageReqReference;
use deno_graph::Module;
use deno_graph::ModuleGraph;
use deno_graph::ResolutionResolved;
use deno_runtime::deno_node::NodeResolutionMode;
use deno_runtime::permissions::PermissionsContainer;
use deno_semver::npm::NpmPackageNvReference;
use deno_semver::npm::NpmPackageReqReference;
use lsp_types::Url;
use once_cell::sync::Lazy;
use std::borrow::Cow;


@@ -15,7 +15,6 @@ use deno_core::serde_v8;
use deno_core::v8;
use deno_core::Extension;
use deno_core::ModuleId;
use deno_graph::npm::NpmPackageReqReference;
use deno_runtime::colors;
use deno_runtime::deno_node;
use deno_runtime::fmt_errors::format_js_error;
@@ -27,6 +26,7 @@ use deno_runtime::web_worker::WebWorkerOptions;
use deno_runtime::worker::MainWorker;
use deno_runtime::worker::WorkerOptions;
use deno_runtime::BootstrapOptions;
use deno_semver::npm::NpmPackageReqReference;
use crate::args::DenoSubcommand;
use crate::errors;