
chore: serve node headers from a test server to fix flaky node-gyp test (#26749)

Fixes https://github.com/denoland/deno/issues/24749

Runs a server that just returns the header tarball and checksum, and sets the `NODEJS_ORG_MIRROR` env var so that `node-gyp` uses it instead of `nodejs.org`.
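
For context: `node-gyp` checks the `NODEJS_ORG_MIRROR` environment variable before falling back to the public `nodejs.org` download site, which is what makes this redirect possible. A minimal, purely illustrative Rust sketch of that resolution step (node-gyp itself is JavaScript, so this is not its actual code; the fallback URL is its documented default):

use std::env;

// Illustrative sketch only: how a node-gyp-style tool picks the base URL it
// downloads headers from. Under the test harness, NODEJS_ORG_MIRROR now points
// at the local mirror (http://127.0.0.1:4252/), so nodejs.org is never contacted.
fn headers_base_url() -> String {
    env::var("NODEJS_ORG_MIRROR")
        .unwrap_or_else(|_| "https://nodejs.org/dist".to_string())
}

fn main() {
    println!("downloading node headers from {}", headers_base_url());
}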
Nathan Whitaker 2024-11-06 19:52:46 -08:00 committed by GitHub
parent 1cab4f07a3
commit 742744d498
9 changed files with 287 additions and 9 deletions

Cargo.lock (generated)

@@ -7190,6 +7190,7 @@ dependencies = [
"console_static_text",
"deno_unsync",
"denokv_proto",
+ "faster-hex",
"fastwebsockets",
"flate2",
"futures",


@@ -2,7 +2,7 @@
"name": "@denotest/node-addon",
"version": "1.0.0",
"scripts": {
- "install": "node-gyp configure build"
+ "install": "node-gyp configure --verbose build"
},
"dependencies": {
"node-gyp": "10.1.0"

Binary file not shown.

Binary file not shown.


@@ -21,6 +21,7 @@ bytes.workspace = true
console_static_text.workspace = true
deno_unsync = "0"
denokv_proto.workspace = true
+ faster-hex.workspace = true
fastwebsockets.workspace = true
flate2 = { workspace = true, features = ["default"] }
futures.workspace = true


@@ -28,6 +28,7 @@ use crate::fs::PathRef;
use crate::http_server;
use crate::jsr_registry_unset_url;
use crate::lsp::LspClientBuilder;
+ use crate::nodejs_org_mirror_unset_url;
use crate::npm_registry_unset_url;
use crate::pty::Pty;
use crate::strip_ansi_codes;
@@ -843,6 +844,12 @@ impl TestCommandBuilder {
if !envs.contains_key("JSR_URL") {
envs.insert("JSR_URL".to_string(), jsr_registry_unset_url());
}
+ if !envs.contains_key("NODEJS_ORG_MIRROR") {
+ envs.insert(
+ "NODEJS_ORG_MIRROR".to_string(),
+ nodejs_org_mirror_unset_url(),
+ );
+ }
for key in &self.envs_remove {
envs.remove(key);
}


@@ -52,6 +52,7 @@ static GUARD: Lazy<Mutex<HttpServerCount>> = Lazy::new(Default::default);
pub fn env_vars_for_npm_tests() -> Vec<(String, String)> {
vec![
("NPM_CONFIG_REGISTRY".to_string(), npm_registry_url()),
+ ("NODEJS_ORG_MIRROR".to_string(), nodejs_org_mirror_url()),
("NO_COLOR".to_string(), "1".to_string()),
]
}
@@ -130,6 +131,7 @@ pub fn env_vars_for_jsr_npm_tests() -> Vec<(String, String)> {
),
("DISABLE_JSR_PROVENANCE".to_string(), "true".to_string()),
("NO_COLOR".to_string(), "1".to_string()),
+ ("NODEJS_ORG_MIRROR".to_string(), nodejs_org_mirror_url()),
]
}
@@ -175,27 +177,41 @@ pub fn deno_config_path() -> PathRef {
/// Test server registry url.
pub fn npm_registry_url() -> String {
- "http://localhost:4260/".to_string()
+ format!("http://localhost:{}/", servers::PUBLIC_NPM_REGISTRY_PORT)
}
pub fn npm_registry_unset_url() -> String {
"http://NPM_CONFIG_REGISTRY.is.unset".to_string()
}
+ pub fn nodejs_org_mirror_url() -> String {
+ format!(
+ "http://127.0.0.1:{}/",
+ servers::NODEJS_ORG_MIRROR_SERVER_PORT
+ )
+ }
+ pub fn nodejs_org_mirror_unset_url() -> String {
+ "http://NODEJS_ORG_MIRROR.is.unset".to_string()
+ }
pub fn jsr_registry_url() -> String {
- "http://127.0.0.1:4250/".to_string()
+ format!("http://127.0.0.1:{}/", servers::JSR_REGISTRY_SERVER_PORT)
}
pub fn rekor_url() -> String {
- "http://127.0.0.1:4251".to_string()
+ format!("http://127.0.0.1:{}", servers::PROVENANCE_MOCK_SERVER_PORT)
}
pub fn fulcio_url() -> String {
- "http://127.0.0.1:4251".to_string()
+ format!("http://127.0.0.1:{}", servers::PROVENANCE_MOCK_SERVER_PORT)
}
pub fn gha_token_url() -> String {
- "http://127.0.0.1:4251/gha_oidc?test=true".to_string()
+ format!(
+ "http://127.0.0.1:{}/gha_oidc?test=true",
+ servers::PROVENANCE_MOCK_SERVER_PORT
+ )
}
pub fn jsr_registry_unset_url() -> String {
@@ -307,7 +323,7 @@ async fn get_tcp_listener_stream(
futures::stream::select_all(listeners)
}
- pub const TEST_SERVERS_COUNT: usize = 32;
+ pub const TEST_SERVERS_COUNT: usize = 33;
#[derive(Default)]
struct HttpServerCount {
@@ -565,6 +581,7 @@ pub fn deno_cmd_with_deno_dir(deno_dir: &TempDir) -> TestCommandBuilder {
TestCommandBuilder::new(deno_dir.clone())
.env("DENO_DIR", deno_dir.path())
.env("NPM_CONFIG_REGISTRY", npm_registry_unset_url())
+ .env("NODEJS_ORG_MIRROR", nodejs_org_mirror_unset_url())
.env("JSR_URL", jsr_registry_unset_url())
}
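
With this change, commands built through `deno_cmd_with_deno_dir` default `NODEJS_ORG_MIRROR` to the deliberately unreachable "unset" URL, while npm-enabled tests get the real mirror through `env_vars_for_npm_tests()`. Because `TestCommandBuilder` only fills in the default when the key is missing (the `contains_key` check above), a test can also set the mirror explicitly; a hypothetical sketch mirroring the builder calls shown in this diff (assuming these items are exported from the `test_util` crate):

// Hypothetical usage sketch; not part of this PR.
use test_util::{nodejs_org_mirror_url, TempDir, TestCommandBuilder};

fn addon_build_cmd(deno_dir: &TempDir) -> TestCommandBuilder {
    // Because NODEJS_ORG_MIRROR is set here, the builder's contains_key check
    // leaves it alone instead of substituting nodejs_org_mirror_unset_url().
    TestCommandBuilder::new(deno_dir.clone())
        .env("DENO_DIR", deno_dir.path())
        .env("NODEJS_ORG_MIRROR", nodejs_org_mirror_url())
}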


@@ -39,6 +39,7 @@ use tokio::net::TcpStream;
mod grpc;
mod hyper_utils;
mod jsr_registry;
+ mod nodejs_org_mirror;
mod npm_registry;
mod ws;
@@ -86,8 +87,9 @@ const WS_CLOSE_PORT: u16 = 4244;
const WS_PING_PORT: u16 = 4245;
const H2_GRPC_PORT: u16 = 4246;
const H2S_GRPC_PORT: u16 = 4247;
- const JSR_REGISTRY_SERVER_PORT: u16 = 4250;
- const PROVENANCE_MOCK_SERVER_PORT: u16 = 4251;
+ pub(crate) const JSR_REGISTRY_SERVER_PORT: u16 = 4250;
+ pub(crate) const PROVENANCE_MOCK_SERVER_PORT: u16 = 4251;
+ pub(crate) const NODEJS_ORG_MIRROR_SERVER_PORT: u16 = 4252;
pub(crate) const PUBLIC_NPM_REGISTRY_PORT: u16 = 4260;
pub(crate) const PRIVATE_NPM_REGISTRY_1_PORT: u16 = 4261;
pub(crate) const PRIVATE_NPM_REGISTRY_2_PORT: u16 = 4262;
@@ -147,6 +149,10 @@ pub async fn run_all_servers() {
let private_npm_registry_3_server_futs =
npm_registry::private_npm_registry3(PRIVATE_NPM_REGISTRY_3_PORT);
+ // for serving node header files to node-gyp in tests
+ let node_js_mirror_server_fut =
+ nodejs_org_mirror::nodejs_org_mirror(NODEJS_ORG_MIRROR_SERVER_PORT);
let mut futures = vec![
redirect_server_fut.boxed_local(),
ws_server_fut.boxed_local(),
@@ -172,6 +178,7 @@
h2_grpc_server_fut.boxed_local(),
registry_server_fut.boxed_local(),
provenance_mock_server_fut.boxed_local(),
+ node_js_mirror_server_fut.boxed_local(),
];
futures.extend(npm_registry_server_futs);
futures.extend(private_npm_registry_1_server_futs);


@@ -0,0 +1,245 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
//! Server for NodeJS header tarballs, used by `node-gyp` in tests to download headers
//!
//! Loads from `testdata/assets`; if we update our node version in `process.versions`, we'll need to
//! update the header tarball there.
#![allow(clippy::print_stderr)]
use std::collections::HashMap;
use std::convert::Infallible;
use std::net::SocketAddr;
use std::sync::LazyLock;
use bytes::Bytes;
use http::Response;
use http::StatusCode;
use http_body_util::combinators::UnsyncBoxBody;
use http_body_util::Full;
use parking_lot::Mutex;
use crate::servers::hyper_utils::run_server;
use crate::servers::hyper_utils::ServerKind;
use crate::servers::hyper_utils::ServerOptions;
use crate::servers::string_body;
use crate::testdata_path;
use crate::PathRef;
/// a little helper extension trait to log errors but convert to option
trait OkWarn<T, E> {
fn ok_warn(self) -> Option<T>;
}
impl<T, E> OkWarn<T, E> for Result<T, E>
where
E: std::fmt::Display,
{
fn ok_warn(self) -> Option<T> {
self
.inspect_err(|err| {
eprintln!(
"test_server warning: error occurred in nodejs_org_mirror.rs: {err}"
)
})
.ok()
}
}
pub static NODEJS_MIRROR: LazyLock<NodeJsMirror> =
LazyLock::new(NodeJsMirror::default);
#[derive(Default)]
pub struct NodeJsMirror {
cache: Mutex<HashMap<String, Bytes>>,
checksum_cache: Mutex<HashMap<String, String>>,
}
fn asset_file_path(file: &str) -> PathRef {
testdata_path().join("assets").join("node-gyp").join(file)
}
impl NodeJsMirror {
pub fn get_header_bytes(&self, file: &str) -> Option<Bytes> {
let mut cache = self.cache.lock();
let entry = cache.entry(file.to_owned());
match entry {
std::collections::hash_map::Entry::Occupied(occupied) => {
Some(occupied.get().clone())
}
std::collections::hash_map::Entry::Vacant(vacant) => {
let contents = asset_file_path(file);
let contents = contents
.read_to_bytes_if_exists()
.ok_warn()
.map(Bytes::from)?;
vacant.insert(contents.clone());
Some(contents)
}
}
}
fn get_checksum(&self, file: &str, bytes: Bytes) -> String {
use sha2::Digest;
if let Some(checksum) = self.checksum_cache.lock().get(file).cloned() {
return checksum;
}
let mut hasher = sha2::Sha256::new();
hasher.update(&bytes);
let checksum = faster_hex::hex_string(hasher.finalize().as_ref());
self
.checksum_cache
.lock()
.insert(file.to_owned(), checksum.clone());
checksum
}
pub fn get_checksum_file(&self, version: &str) -> Option<String> {
let mut entries = Vec::with_capacity(2);
let header_file = header_tar_name(version);
let header_bytes = self.get_header_bytes(&header_file)?;
let header_checksum = self.get_checksum(&header_file, header_bytes);
entries.push((header_file, header_checksum));
if cfg!(windows) {
if !cfg!(target_arch = "x86_64") {
panic!("unsupported target arch on windows, only support x86_64");
}
let Some(bytes) = self.get_node_lib_bytes(version, "win-x64") else {
eprintln!("test server failed to get node lib");
return None;
};
{
let file = format!("{version}/win-x64/node.lib");
let checksum = self.get_checksum(&file, bytes);
let filename_for_checksum =
file.trim_start_matches(&format!("{version}/"));
entries.push((filename_for_checksum.to_owned(), checksum));
}
}
Some(
entries
.into_iter()
.map(|(file, checksum)| format!("{checksum} {file}"))
.collect::<Vec<_>>()
.join("\n"),
)
}
pub fn get_node_lib_bytes(
&self,
version: &str,
platform: &str,
) -> Option<Bytes> {
let mut cache = self.cache.lock();
let file_name = format!("{version}/{platform}/node.lib");
let entry = cache.entry(file_name);
match entry {
std::collections::hash_map::Entry::Occupied(occupied) => {
Some(occupied.get().clone())
}
std::collections::hash_map::Entry::Vacant(vacant) => {
let tarball_filename =
format!("{version}__{platform}__node.lib.tar.gz");
let contents = asset_file_path(&tarball_filename);
let contents = contents.read_to_bytes_if_exists().ok_warn()?;
let extracted = Bytes::from(extract_tarball(&contents)?);
vacant.insert(extracted.clone());
Some(extracted)
}
}
}
}
fn header_tar_name(version: &str) -> String {
format!("node-{version}-headers.tar.gz")
}
fn extract_tarball(compressed: &[u8]) -> Option<Vec<u8>> {
let mut out = Vec::with_capacity(compressed.len());
let decoder = flate2::read::GzDecoder::new(compressed);
let mut archive = tar::Archive::new(decoder);
for file in archive.entries().ok_warn()? {
let mut file = file.ok_warn()?;
std::io::copy(&mut file, &mut out).ok_warn()?;
}
Some(out)
}
/// Server for node JS header tarballs, used by `node-gyp` in tests
pub async fn nodejs_org_mirror(port: u16) {
let addr = SocketAddr::from(([127, 0, 0, 1], port));
run_server(
ServerOptions {
addr,
error_msg: "nodejs mirror server error",
kind: ServerKind::Auto,
},
|req| async move {
let path = req.uri().path();
if path.contains("-headers.tar.gz")
|| path.contains("SHASUMS256.txt")
|| path.contains("node.lib")
{
let mut parts = path.split('/');
let _ = parts.next(); // empty
let Some(version) = parts.next() else {
return not_found(format!("missing node version in path: {path}"));
};
let Some(file) = parts.next() else {
return not_found(format!("missing file version in path: {path}"));
};
if file == "SHASUMS256.txt" {
let Some(checksum_file) = NODEJS_MIRROR.get_checksum_file(version)
else {
return not_found(format!("failed to get header checksum: {path}"));
};
return Ok(Response::new(string_body(&checksum_file)));
} else if !file.contains("headers") {
let platform = file;
let Some(file) = parts.next() else {
return not_found("expected file");
};
if file != "node.lib" {
return not_found(format!(
"unexpected file name, expected node.lib, got: {file}"
));
}
let Some(bytes) = NODEJS_MIRROR.get_node_lib_bytes(version, platform)
else {
return not_found("expected node lib bytes");
};
return Ok(Response::new(UnsyncBoxBody::new(Full::new(bytes))));
}
let Some(bytes) = NODEJS_MIRROR.get_header_bytes(file) else {
return not_found(format!(
"couldn't find headers for version {version}, missing file: {file}"
));
};
Ok(Response::new(UnsyncBoxBody::new(Full::new(bytes))))
} else {
not_found(format!("unexpected request path: {path}"))
}
},
)
.await
}
fn not_found(
msg: impl AsRef<str>,
) -> Result<Response<UnsyncBoxBody<Bytes, Infallible>>, anyhow::Error> {
let msg = msg.as_ref();
eprintln!(
"test_server warning: error likely occurred in nodejs_org_mirror.rs: {msg}"
);
Response::builder()
.status(StatusCode::NOT_FOUND)
.body(string_body(msg))
.map_err(|e| e.into())
}
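
For orientation, the handler above answers exactly three request shapes: the header tarball, the `SHASUMS256.txt` checksum file (one `<sha256> <file>` line per entry, as assembled in `get_checksum_file`), and, for Windows x64 only, the `node.lib` import library. An illustrative sketch of the URLs involved, using a hypothetical version string (the real one comes from the Deno build under test) and the `NODEJS_ORG_MIRROR_SERVER_PORT` constant from `servers/mod.rs`:

// Illustrative only; "v20.11.1" is a placeholder version string.
fn main() {
    let base = "http://127.0.0.1:4252"; // NODEJS_ORG_MIRROR_SERVER_PORT
    let version = "v20.11.1";
    // header tarball, read from testdata/assets/node-gyp/node-<version>-headers.tar.gz
    println!("{base}/{version}/node-{version}-headers.tar.gz");
    // checksum file covering the tarball (and node.lib on Windows)
    println!("{base}/{version}/SHASUMS256.txt");
    // import library, extracted from <version>__win-x64__node.lib.tar.gz in the assets dir
    println!("{base}/{version}/win-x64/node.lib");
}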