refactor: simplify hyper, http, h2 deps (#21715)

Main changes:
- "hyper" has been renamed to "hyper_v014" to signal that it's legacy
- "hyper1" has been renamed to "hyper" and should be the default
This commit is contained in:
parent
c73abff4af
commit
69cc117015
33 changed files with 252 additions and 256 deletions
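For orientation, a minimal sketch (not code from this commit) of what the rename means at call sites: the Cargo alias keeps hyper 0.14 available under the explicit name `hyper_v014`, while a bare `hyper` path now always refers to hyper 1.x. The helper functions below are hypothetical and only illustrate the naming.

use hyper::body::Incoming; // hyper 1.x, the default going forward
use hyper_v014::body::HttpBody; // hyper 0.14.x, explicitly marked legacy

// Hypothetical: inspect a header through the hyper 1.x re-exported types.
fn content_type(req: &hyper::Request<Incoming>) -> Option<&str> {
  req
    .headers()
    .get(hyper::header::CONTENT_TYPE)
    .and_then(|v| v.to_str().ok())
}

// Hypothetical: legacy-side check, kept only until hyper 0.14 is dropped.
fn is_finished_014(body: &hyper_v014::Body) -> bool {
  body.is_end_stream()
}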
Cargo.lock (generated, 2 changes)
@@ -966,7 +966,6 @@ dependencies = [
"glibc_version",
"glob",
"hex",
"http 0.2.11",
"http 1.0.0",
"http-body-util",
"hyper 1.1.0",

@@ -1620,7 +1619,6 @@ dependencies = [
"flate2",
"fs3",
"fwdansi",
"http 0.2.11",
"http 1.0.0",
"http-body-util",
"hyper 0.14.27",
@@ -103,13 +103,14 @@ fs3 = "0.5.0"
futures = "0.3.21"
glob = "0.3.1"
hex = "0.4"
http = "0.2.9"
h2 = { version = "0.3.17", features = ["unstable"] }
http_v02 = { package = "http", version = "0.2.9" }
http = "1.0"
h2 = "0.4"
httparse = "1.8.0"
hyper-util = { version = "=0.1.2", features = ["tokio", "server", "server-auto"] }
http-body-util = "0.1"
hyper = { version = "0.14.26", features = ["runtime", "http1"] }
hyper1 = { package = "hyper", version = "=1.1.0", features = ["full"] }
hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
hyper = { version = "=1.1.0", features = ["full"] }
indexmap = { version = "2", features = ["serde"] }
libc = "0.2.126"
libz-sys = { version = "1.1", default-features = false }
@@ -100,7 +100,6 @@ flate2.workspace = true
fs3.workspace = true
glob = "0.3.1"
hex.workspace = true
http.workspace = true
import_map = { version = "=0.18.0", features = ["ext"] }
indexmap.workspace = true
jsonc-parser = { version = "=0.23.0", features = ["serde"] }

@@ -150,12 +149,12 @@ nix.workspace = true
[dev-dependencies]
deno_bench_util.workspace = true
fastwebsockets_06 = { package = "fastwebsockets", version = "0.6", features = ["upgrade", "unstable-split"] }
fastwebsockets = { workspace = true, features = ["upgrade", "unstable-split"] }
flaky_test = "=0.1.0"
http.workspace = true
http-body-util.workspace = true
http_1 = { package = "http", version = "1.0" }
hyper.workspace = true
hyper-util.workspace = true
hyper1.workspace = true
once_cell.workspace = true
os_pipe.workspace = true
pretty_assertions.workspace = true
@@ -7,13 +7,13 @@ use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::url;
use deno_runtime::deno_fetch::reqwest;
use fastwebsockets_06::FragmentCollector;
use fastwebsockets_06::Frame;
use fastwebsockets_06::WebSocket;
use hyper1::body::Incoming;
use hyper1::upgrade::Upgraded;
use hyper1::Request;
use hyper1::Response;
use fastwebsockets::FragmentCollector;
use fastwebsockets::Frame;
use fastwebsockets::WebSocket;
use hyper::body::Incoming;
use hyper::upgrade::Upgraded;
use hyper::Request;
use hyper::Response;
use hyper_util::rt::TokioIo;
use std::io::BufRead;
use std::time::Duration;

@@ -25,14 +25,9 @@ use util::assert_starts_with;
use util::DenoChild;
use util::TestContextBuilder;

// TODO(bartlomieju): remove `http::header` once we update to `reqwest`
// to version that uses Hyper 1.0
use http::header::HeaderValue;
use http::header::HOST;

struct SpawnExecutor;

impl<Fut> hyper1::rt::Executor<Fut> for SpawnExecutor
impl<Fut> hyper::rt::Executor<Fut> for SpawnExecutor
where
  Fut: std::future::Future + Send + 'static,
  Fut::Output: Send + 'static,
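The impl above is cut off by the hunk boundary; as a sketch (matching the bounds shown, with tokio assumed as the runtime), the whole executor is just a forwarding `execute`:

struct SpawnExecutor;

impl<Fut> hyper::rt::Executor<Fut> for SpawnExecutor
where
  Fut: std::future::Future + Send + 'static,
  Fut::Output: Send + 'static,
{
  fn execute(&self, fut: Fut) {
    // Drive hyper's connection/handshake futures on the shared tokio runtime.
    tokio::spawn(fut);
  }
}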
@@ -60,17 +55,17 @@ async fn connect_to_ws(
    .method("GET")
    .uri(uri.path())
    .header("Host", host)
    .header(hyper1::header::UPGRADE, "websocket")
    .header(hyper1::header::CONNECTION, "Upgrade")
    .header(hyper::header::UPGRADE, "websocket")
    .header(hyper::header::CONNECTION, "Upgrade")
    .header(
      "Sec-WebSocket-Key",
      fastwebsockets_06::handshake::generate_key(),
      fastwebsockets::handshake::generate_key(),
    )
    .header("Sec-WebSocket-Version", "13")
    .body(http_body_util::Empty::<Bytes>::new())
    .unwrap();

  fastwebsockets_06::handshake::client(&SpawnExecutor, req, stream)
  fastwebsockets::handshake::client(&SpawnExecutor, req, stream)
    .await
    .unwrap()
}

@@ -730,9 +725,10 @@ async fn inspector_json() {
  ] {
    let mut req = reqwest::Request::new(reqwest::Method::GET, url.clone());
    if let Some(host) = host {
      req
        .headers_mut()
        .insert(HOST, HeaderValue::from_static(host));
      req.headers_mut().insert(
        reqwest::header::HOST,
        reqwest::header::HeaderValue::from_static(host),
      );
    }
    let resp = client.execute(req).await.unwrap();
    assert_eq!(resp.status(), reqwest::StatusCode::OK);
@@ -4379,32 +4379,32 @@ async fn websocketstream_ping() {
  let script = util::testdata_path().join("run/websocketstream_ping_test.ts");
  let root_ca = util::testdata_path().join("tls/RootCA.pem");

  let srv_fn = hyper1::service::service_fn(|mut req| async move {
  let srv_fn = hyper::service::service_fn(|mut req| async move {
    let (response, upgrade_fut) =
      fastwebsockets_06::upgrade::upgrade(&mut req).unwrap();
      fastwebsockets::upgrade::upgrade(&mut req).unwrap();
    tokio::spawn(async move {
      let mut ws = upgrade_fut.await.unwrap();

      ws.write_frame(fastwebsockets_06::Frame::text(b"A"[..].into()))
      ws.write_frame(fastwebsockets::Frame::text(b"A"[..].into()))
        .await
        .unwrap();
      ws.write_frame(fastwebsockets_06::Frame::new(
      ws.write_frame(fastwebsockets::Frame::new(
        true,
        fastwebsockets_06::OpCode::Ping,
        fastwebsockets::OpCode::Ping,
        None,
        vec![].into(),
      ))
      .await
      .unwrap();
      ws.write_frame(fastwebsockets_06::Frame::text(b"B"[..].into()))
      ws.write_frame(fastwebsockets::Frame::text(b"B"[..].into()))
        .await
        .unwrap();
      let message = ws.read_frame().await.unwrap();
      assert_eq!(message.opcode, fastwebsockets_06::OpCode::Pong);
      ws.write_frame(fastwebsockets_06::Frame::text(b"C"[..].into()))
      assert_eq!(message.opcode, fastwebsockets::OpCode::Pong);
      ws.write_frame(fastwebsockets::Frame::text(b"C"[..].into()))
        .await
        .unwrap();
      ws.write_frame(fastwebsockets_06::Frame::close_raw(vec![].into()))
      ws.write_frame(fastwebsockets::Frame::close_raw(vec![].into()))
        .await
        .unwrap();
    });

@@ -4427,7 +4427,7 @@ async fn websocketstream_ping() {
  tokio::spawn(async move {
    let (stream, _) = server.accept().await.unwrap();
    let io = hyper_util::rt::TokioIo::new(stream);
    let conn_fut = hyper1::server::conn::http1::Builder::new()
    let conn_fut = hyper::server::conn::http1::Builder::new()
      .serve_connection(io, srv_fn)
      .with_upgrades();

@@ -4442,7 +4442,7 @@ async fn websocketstream_ping() {

struct SpawnExecutor;

impl<Fut> hyper1::rt::Executor<Fut> for SpawnExecutor
impl<Fut> hyper::rt::Executor<Fut> for SpawnExecutor
where
  Fut: std::future::Future + Send + 'static,
  Fut::Output: Send + 'static,

@@ -4478,12 +4478,12 @@ async fn websocket_server_multi_field_connection_header() {
  let stream = tokio::net::TcpStream::connect("localhost:4319")
    .await
    .unwrap();
  let req = http_1::Request::builder()
    .header(http_1::header::UPGRADE, "websocket")
    .header(http_1::header::CONNECTION, "keep-alive, Upgrade")
  let req = http::Request::builder()
    .header(http::header::UPGRADE, "websocket")
    .header(http::header::CONNECTION, "keep-alive, Upgrade")
    .header(
      "Sec-WebSocket-Key",
      fastwebsockets_06::handshake::generate_key(),
      fastwebsockets::handshake::generate_key(),
    )
    .header("Sec-WebSocket-Version", "13")
    .uri("ws://localhost:4319")

@@ -4491,15 +4491,15 @@ async fn websocket_server_multi_field_connection_header() {
    .unwrap();

  let (mut socket, _) =
    fastwebsockets_06::handshake::client(&SpawnExecutor, req, stream)
    fastwebsockets::handshake::client(&SpawnExecutor, req, stream)
      .await
      .unwrap();

  let message = socket.read_frame().await.unwrap();
  assert_eq!(message.opcode, fastwebsockets_06::OpCode::Close);
  assert_eq!(message.opcode, fastwebsockets::OpCode::Close);
  assert!(message.payload.is_empty());
  socket
    .write_frame(fastwebsockets_06::Frame::close_raw(vec![].into()))
    .write_frame(fastwebsockets::Frame::close_raw(vec![].into()))
    .await
    .unwrap();
  assert!(child.wait().unwrap().success());

@@ -4533,12 +4533,12 @@ async fn websocket_server_idletimeout() {
  let stream = tokio::net::TcpStream::connect("localhost:4509")
    .await
    .unwrap();
  let req = http_1::Request::builder()
    .header(http_1::header::UPGRADE, "websocket")
    .header(http_1::header::CONNECTION, "keep-alive, Upgrade")
  let req = http::Request::builder()
    .header(http::header::UPGRADE, "websocket")
    .header(http::header::CONNECTION, "keep-alive, Upgrade")
    .header(
      "Sec-WebSocket-Key",
      fastwebsockets_06::handshake::generate_key(),
      fastwebsockets::handshake::generate_key(),
    )
    .header("Sec-WebSocket-Version", "13")
    .uri("ws://localhost:4509")

@@ -4546,7 +4546,7 @@ async fn websocket_server_idletimeout() {
    .unwrap();

  let (_socket, _) =
    fastwebsockets_06::handshake::client(&SpawnExecutor, req, stream)
    fastwebsockets::handshake::client(&SpawnExecutor, req, stream)
      .await
      .unwrap();
@@ -19,8 +19,6 @@ use deno_core::unsync::JoinHandle;
use deno_core::unsync::JoinSet;
use deno_runtime::colors;
use deno_runtime::deno_fetch::reqwest;
use http::header::AUTHORIZATION;
use http::header::CONTENT_ENCODING;
use import_map::ImportMap;
use lsp_types::Url;
use serde::Serialize;

@@ -548,8 +546,8 @@ async fn publish_package(
  let response = client
    .post(url)
    .header(AUTHORIZATION, authorization)
    .header(CONTENT_ENCODING, "gzip")
    .header(reqwest::header::AUTHORIZATION, authorization)
    .header(reqwest::header::CONTENT_ENCODING, "gzip")
    .body(package.tarball.clone())
    .send()
    .await?;
@@ -19,7 +19,7 @@ data-url.workspace = true
deno_core.workspace = true
deno_tls.workspace = true
dyn-clone = "1"
http.workspace = true
http_v02.workspace = true
pin-project.workspace = true
reqwest.workspace = true
serde.workspace = true
@@ -31,7 +31,7 @@ impl FetchHandler for FsFetchHandler {
      let file = tokio::fs::File::open(path).map_err(|_| ()).await?;
      let stream = ReaderStream::new(file);
      let body = reqwest::Body::wrap_stream(stream);
      let response = http::Response::builder()
      let response = http_v02::Response::builder()
        .status(StatusCode::OK)
        .body(body)
        .map_err(|_| ())?
@@ -44,8 +44,8 @@ use deno_tls::Proxy;
use deno_tls::RootCertStoreProvider;

use data_url::DataUrl;
use http::header::CONTENT_LENGTH;
use http::Uri;
use http_v02::header::CONTENT_LENGTH;
use http_v02::Uri;
use reqwest::header::HeaderMap;
use reqwest::header::HeaderName;
use reqwest::header::HeaderValue;

@@ -416,9 +416,12 @@ where
    .decode_to_vec()
    .map_err(|e| type_error(format!("{e:?}")))?;

  let response = http::Response::builder()
    .status(http::StatusCode::OK)
    .header(http::header::CONTENT_TYPE, data_url.mime_type().to_string())
  let response = http_v02::Response::builder()
    .status(http_v02::StatusCode::OK)
    .header(
      http_v02::header::CONTENT_TYPE,
      data_url.mime_type().to_string(),
    )
    .body(reqwest::Body::from(body))?;

  let fut = async move { Ok(Ok(Response::from(response))) };
@@ -32,11 +32,11 @@ deno_net.workspace = true
deno_websocket.workspace = true
flate2.workspace = true
http.workspace = true
http_1 = { package = "http", version = "=1.0.0" }
http_v02.workspace = true
httparse.workspace = true
hyper = { workspace = true, features = ["server", "stream", "http1", "http2", "runtime"] }
hyper.workspace = true
hyper-util.workspace = true
hyper1.workspace = true
hyper_v014 = { workspace = true, features = ["server", "stream", "http1", "http2", "runtime"] }
itertools = "0.10"
memmem.workspace = true
mime = "0.3.16"
@@ -3,7 +3,6 @@
// Forked from https://github.com/superfly/accept-encoding/blob/1cded757ec7ff3916e5bfe7441db76cdc48170dc/
// Forked to support both http 0.3 and http 1.0 crates.

use http as http_02;
use itertools::Itertools;

/// A list enumerating the categories of errors in this crate.

@@ -78,10 +77,10 @@ pub fn preferred(
///
/// Compatible with `http` crate for version 0.2.x.
pub fn encodings_iter_http_02(
  headers: &http_02::HeaderMap,
  headers: &http_v02::HeaderMap,
) -> impl Iterator<Item = Result<(Option<Encoding>, f32), EncodingError>> + '_ {
  let iter = headers
    .get_all(http_02::header::ACCEPT_ENCODING)
    .get_all(http_v02::header::ACCEPT_ENCODING)
    .iter()
    .map(|hval| hval.to_str().map_err(|_| EncodingError::InvalidEncoding));
  encodings_iter_inner(iter)

@@ -91,10 +90,10 @@ pub fn encodings_iter_http_02(
///
/// Compatible with `http` crate for version 1.x.
pub fn encodings_iter_http_1(
  headers: &http_1::HeaderMap,
  headers: &http::HeaderMap,
) -> impl Iterator<Item = Result<(Option<Encoding>, f32), EncodingError>> + '_ {
  let iter = headers
    .get_all(http_1::header::ACCEPT_ENCODING)
    .get_all(http::header::ACCEPT_ENCODING)
    .iter()
    .map(|hval| hval.to_str().map_err(|_| EncodingError::InvalidEncoding));
  encodings_iter_inner(iter)

@@ -126,9 +125,9 @@ fn encodings_iter_inner<'s>(
#[cfg(test)]
mod tests {
  use super::*;
  use http::header::ACCEPT_ENCODING;
  use http::HeaderMap;
  use http::HeaderValue;
  use http_v02::header::ACCEPT_ENCODING;
  use http_v02::HeaderMap;
  use http_v02::HeaderValue;

  fn encodings(
    headers: &HeaderMap,
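A short usage sketch of the http 1.x entry point above (assuming this module's `Encoding` and `EncodingError` types are in scope; the helper name is made up for illustration):

// Return the client's stated weight for gzip, if any.
fn gzip_weight(headers: &http::HeaderMap) -> Option<f32> {
  encodings_iter_http_1(headers)
    .filter_map(|res| res.ok())
    .find(|(enc, _)| matches!(enc, Some(Encoding::Gzip)))
    .map(|(_, weight)| weight)
}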
@@ -43,22 +43,22 @@ use deno_core::ResourceId;
use deno_net::ops_tls::TlsStream;
use deno_net::raw::NetworkStream;
use deno_websocket::ws_create_server_stream;
use hyper1::body::Incoming;
use hyper1::header::HeaderMap;
use hyper1::header::ACCEPT_ENCODING;
use hyper1::header::CACHE_CONTROL;
use hyper1::header::CONTENT_ENCODING;
use hyper1::header::CONTENT_LENGTH;
use hyper1::header::CONTENT_RANGE;
use hyper1::header::CONTENT_TYPE;
use hyper1::header::COOKIE;
use hyper1::http::HeaderName;
use hyper1::http::HeaderValue;
use hyper1::server::conn::http1;
use hyper1::server::conn::http2;
use hyper1::service::service_fn;
use hyper1::service::HttpService;
use hyper1::StatusCode;
use hyper::body::Incoming;
use hyper::header::HeaderMap;
use hyper::header::ACCEPT_ENCODING;
use hyper::header::CACHE_CONTROL;
use hyper::header::CONTENT_ENCODING;
use hyper::header::CONTENT_LENGTH;
use hyper::header::CONTENT_RANGE;
use hyper::header::CONTENT_TYPE;
use hyper::header::COOKIE;
use hyper::http::HeaderName;
use hyper::http::HeaderValue;
use hyper::server::conn::http1;
use hyper::server::conn::http2;
use hyper::service::service_fn;
use hyper::service::HttpService;
use hyper::StatusCode;
use hyper_util::rt::TokioIo;
use once_cell::sync::Lazy;
use smallvec::SmallVec;

@@ -77,7 +77,7 @@ use fly_accept_encoding::Encoding;
use tokio::io::AsyncReadExt;
use tokio::io::AsyncWriteExt;

type Request = hyper1::Request<Incoming>;
type Request = hyper::Request<Incoming>;

static USE_WRITEV: Lazy<bool> = Lazy::new(|| {
  let enable = std::env::var("DENO_USE_WRITEV").ok();

@@ -635,7 +635,7 @@ fn modify_compressibility_from_response(
/// If the user provided a ETag header for uncompressed data, we need to ensure it is a
/// weak Etag header ("W/").
fn weaken_etag(hmap: &mut HeaderMap) {
  if let Some(etag) = hmap.get_mut(hyper1::header::ETAG) {
  if let Some(etag) = hmap.get_mut(hyper::header::ETAG) {
    if !etag.as_bytes().starts_with(b"W/") {
      let mut v = Vec::with_capacity(etag.as_bytes().len() + 2);
      v.extend(b"W/");
@@ -650,7 +650,7 @@ fn weaken_etag(hmap: &mut HeaderMap) {
// to make sure cache services do not serve uncompressed data to clients that
// support compression.
fn ensure_vary_accept_encoding(hmap: &mut HeaderMap) {
  if let Some(v) = hmap.get_mut(hyper1::header::VARY) {
  if let Some(v) = hmap.get_mut(hyper::header::VARY) {
    if let Ok(s) = v.to_str() {
      if !s.to_lowercase().contains("accept-encoding") {
        *v = format!("Accept-Encoding, {s}").try_into().unwrap()

@@ -659,7 +659,7 @@ fn ensure_vary_accept_encoding(hmap: &mut HeaderMap) {
    }
  }
  hmap.insert(
    hyper1::header::VARY,
    hyper::header::VARY,
    HeaderValue::from_static("Accept-Encoding"),
  );
}

@@ -791,7 +791,7 @@ fn serve_http11_unconditional(
  io: impl HttpServeStream,
  svc: impl HttpService<Incoming, ResBody = HttpRecordResponse> + 'static,
  cancel: Rc<CancelHandle>,
) -> impl Future<Output = Result<(), hyper1::Error>> + 'static {
) -> impl Future<Output = Result<(), hyper::Error>> + 'static {
  let conn = http1::Builder::new()
    .keep_alive(true)
    .writev(*USE_WRITEV)

@@ -813,7 +813,7 @@ fn serve_http2_unconditional(
  io: impl HttpServeStream,
  svc: impl HttpService<Incoming, ResBody = HttpRecordResponse> + 'static,
  cancel: Rc<CancelHandle>,
) -> impl Future<Output = Result<(), hyper1::Error>> + 'static {
) -> impl Future<Output = Result<(), hyper::Error>> + 'static {
  let conn =
    http2::Builder::new(LocalExecutor).serve_connection(TokioIo::new(io), svc);
  async {

@@ -41,18 +41,18 @@ use deno_net::raw::NetworkStream;
use deno_websocket::ws_create_server_stream;
use flate2::write::GzEncoder;
use flate2::Compression;
use hyper::body::Bytes;
use hyper::body::HttpBody;
use hyper::body::SizeHint;
use hyper::header::HeaderName;
use hyper::header::HeaderValue;
use hyper::server::conn::Http;
use hyper::service::Service;
use hyper::Body;
use hyper::HeaderMap;
use hyper::Request;
use hyper::Response;
use hyper_util::rt::TokioIo;
use hyper_v014::body::Bytes;
use hyper_v014::body::HttpBody;
use hyper_v014::body::SizeHint;
use hyper_v014::header::HeaderName;
use hyper_v014::header::HeaderValue;
use hyper_v014::server::conn::Http;
use hyper_v014::service::Service;
use hyper_v014::Body;
use hyper_v014::HeaderMap;
use hyper_v014::Request;
use hyper_v014::Response;
use serde::Serialize;
use std::borrow::Cow;
use std::cell::RefCell;

@@ -157,7 +157,7 @@ struct HttpConnResource {
  addr: HttpSocketAddr,
  scheme: &'static str,
  acceptors_tx: mpsc::UnboundedSender<HttpAcceptor>,
  closed_fut: Shared<RemoteHandle<Result<(), Arc<hyper::Error>>>>,
  closed_fut: Shared<RemoteHandle<Result<(), Arc<hyper_v014::Error>>>>,
  cancel_handle: Rc<CancelHandle>, // Closes gracefully and cancels accept ops.
}

@@ -470,10 +470,10 @@ impl Default for HttpResponseWriter {
  }
}

struct BodyUncompressedSender(Option<hyper::body::Sender>);
struct BodyUncompressedSender(Option<hyper_v014::body::Sender>);

impl BodyUncompressedSender {
  fn sender(&mut self) -> &mut hyper::body::Sender {
  fn sender(&mut self) -> &mut hyper_v014::body::Sender {
    // This is safe because we only ever take the sender out of the option
    // inside of the shutdown method.
    self.0.as_mut().unwrap()

@@ -486,8 +486,8 @@ impl BodyUncompressedSender {
  }
}

impl From<hyper::body::Sender> for BodyUncompressedSender {
  fn from(sender: hyper::body::Sender) -> Self {
impl From<hyper_v014::body::Sender> for BodyUncompressedSender {
  fn from(sender: hyper_v014::body::Sender) -> Self {
    BodyUncompressedSender(Some(sender))
  }
}

@@ -535,7 +535,7 @@ async fn op_http_accept(
}

fn req_url(
  req: &hyper::Request<hyper::Body>,
  req: &hyper_v014::Request<hyper_v014::Body>,
  scheme: &'static str,
  addr: &HttpSocketAddr,
) -> String {

@@ -601,7 +601,7 @@ fn req_headers(
  let mut headers = Vec::with_capacity(header_map.len());
  for (name, value) in header_map.iter() {
    if name == hyper::header::COOKIE {
    if name == hyper_v014::header::COOKIE {
      cookies.push(value.as_bytes());
    } else {
      let name: &[u8] = name.as_ref();

@@ -657,10 +657,10 @@ async fn op_http_write_headers(
  if compressing {
    weaken_etag(hmap);
    // Drop 'content-length' header. Hyper will update it using compressed body.
    hmap.remove(hyper::header::CONTENT_LENGTH);
    hmap.remove(hyper_v014::header::CONTENT_LENGTH);
    // Content-Encoding header
    hmap.insert(
      hyper::header::CONTENT_ENCODING,
      hyper_v014::header::CONTENT_ENCODING,
      HeaderValue::from_static(match encoding {
        Encoding::Brotli => "br",
        Encoding::Gzip => "gzip",

@@ -708,7 +708,7 @@ fn http_response(
  data: Option<StringOrBuffer>,
  compressing: bool,
  encoding: Encoding,
) -> Result<(HttpResponseWriter, hyper::Body), AnyError> {
) -> Result<(HttpResponseWriter, hyper_v014::Body), AnyError> {
  // Gzip, after level 1, doesn't produce significant size difference.
  // This default matches nginx default gzip compression level (1):
  // https://nginx.org/en/docs/http/ngx_http_gzip_module.html#gzip_comp_level

@@ -780,8 +780,8 @@ fn http_response(
// If user provided a ETag header for uncompressed data, we need to
// ensure it is a Weak Etag header ("W/").
fn weaken_etag(hmap: &mut hyper::HeaderMap) {
  if let Some(etag) = hmap.get_mut(hyper::header::ETAG) {
fn weaken_etag(hmap: &mut hyper_v014::HeaderMap) {
  if let Some(etag) = hmap.get_mut(hyper_v014::header::ETAG) {
    if !etag.as_bytes().starts_with(b"W/") {
      let mut v = Vec::with_capacity(etag.as_bytes().len() + 2);
      v.extend(b"W/");

@@ -795,8 +795,8 @@ fn weaken_etag(hmap: &mut hyper::HeaderMap) {
// Note: we set the header irrespective of whether or not we compress the data
// to make sure cache services do not serve uncompressed data to clients that
// support compression.
fn ensure_vary_accept_encoding(hmap: &mut hyper::HeaderMap) {
  if let Some(v) = hmap.get_mut(hyper::header::VARY) {
fn ensure_vary_accept_encoding(hmap: &mut hyper_v014::HeaderMap) {
  if let Some(v) = hmap.get_mut(hyper_v014::header::VARY) {
    if let Ok(s) = v.to_str() {
      if !s.to_lowercase().contains("accept-encoding") {
        *v = format!("Accept-Encoding, {s}").try_into().unwrap()

@@ -805,15 +805,17 @@ fn ensure_vary_accept_encoding(hmap: &mut hyper::HeaderMap) {
    }
  }
  hmap.insert(
    hyper::header::VARY,
    hyper_v014::header::VARY,
    HeaderValue::from_static("Accept-Encoding"),
  );
}

fn should_compress(headers: &hyper::HeaderMap) -> bool {
fn should_compress(headers: &hyper_v014::HeaderMap) -> bool {
  // skip compression if the cache-control header value is set to "no-transform" or not utf8
  fn cache_control_no_transform(headers: &hyper::HeaderMap) -> Option<bool> {
    let v = headers.get(hyper::header::CACHE_CONTROL)?;
  fn cache_control_no_transform(
    headers: &hyper_v014::HeaderMap,
  ) -> Option<bool> {
    let v = headers.get(hyper_v014::header::CACHE_CONTROL)?;
    let s = match std::str::from_utf8(v.as_bytes()) {
      Ok(s) => s,
      Err(_) => return Some(true),

@@ -824,15 +826,16 @@ fn should_compress(headers: &hyper::HeaderMap) -> bool {
  // we skip compression if the `content-range` header value is set, as it
  // indicates the contents of the body were negotiated based directly
  // with the user code and we can't compress the response
  let content_range = headers.contains_key(hyper::header::CONTENT_RANGE);
  let content_range = headers.contains_key(hyper_v014::header::CONTENT_RANGE);
  // assume body is already compressed if Content-Encoding header present, thus avoid recompressing
  let is_precompressed = headers.contains_key(hyper::header::CONTENT_ENCODING);
  let is_precompressed =
    headers.contains_key(hyper_v014::header::CONTENT_ENCODING);

  !content_range
    && !is_precompressed
    && !cache_control_no_transform(headers).unwrap_or_default()
    && headers
      .get(hyper::header::CONTENT_TYPE)
      .get(hyper_v014::header::CONTENT_TYPE)
      .map(compressible::is_content_compressible)
      .unwrap_or_default()
}

@@ -1016,7 +1019,7 @@ async fn op_http_upgrade_websocket(
  };

  let (transport, bytes) =
    extract_network_stream(hyper::upgrade::on(request).await?);
    extract_network_stream(hyper_v014::upgrade::on(request).await?);
  let ws_rid =
    ws_create_server_stream(&mut state.borrow_mut(), transport, bytes)?;
  Ok(ws_rid)

@@ -1026,7 +1029,7 @@ async fn op_http_upgrade_websocket(
#[derive(Clone)]
struct LocalExecutor;

impl<Fut> hyper::rt::Executor<Fut> for LocalExecutor
impl<Fut> hyper_v014::rt::Executor<Fut> for LocalExecutor
where
  Fut: Future + 'static,
  Fut::Output: 'static,

@@ -1036,7 +1039,7 @@ where
  }
}

impl<Fut> hyper1::rt::Executor<Fut> for LocalExecutor
impl<Fut> hyper::rt::Executor<Fut> for LocalExecutor
where
  Fut: Future + 'static,
  Fut::Output: 'static,
@@ -1052,8 +1055,8 @@ fn http_error(message: &'static str) -> AnyError {
/// Filters out the ever-surprising 'shutdown ENOTCONN' errors.
fn filter_enotconn(
  result: Result<(), hyper::Error>,
) -> Result<(), hyper::Error> {
  result: Result<(), hyper_v014::Error>,
) -> Result<(), hyper_v014::Error> {
  if result
    .as_ref()
    .err()

@@ -1079,21 +1082,21 @@ trait CanDowncastUpgrade: Sized {
  ) -> Result<(T, Bytes), Self>;
}

impl CanDowncastUpgrade for hyper1::upgrade::Upgraded {
impl CanDowncastUpgrade for hyper::upgrade::Upgraded {
  fn downcast<T: AsyncRead + AsyncWrite + Unpin + 'static>(
    self,
  ) -> Result<(T, Bytes), Self> {
    let hyper1::upgrade::Parts { io, read_buf, .. } =
    let hyper::upgrade::Parts { io, read_buf, .. } =
      self.downcast::<TokioIo<T>>()?;
    Ok((io.into_inner(), read_buf))
  }
}

impl CanDowncastUpgrade for hyper::upgrade::Upgraded {
impl CanDowncastUpgrade for hyper_v014::upgrade::Upgraded {
  fn downcast<T: AsyncRead + AsyncWrite + Unpin + 'static>(
    self,
  ) -> Result<(T, Bytes), Self> {
    let hyper::upgrade::Parts { io, read_buf, .. } = self.downcast()?;
    let hyper_v014::upgrade::Parts { io, read_buf, .. } = self.downcast()?;
    Ok((io, read_buf))
  }
}
@@ -9,9 +9,9 @@ use deno_core::AsyncResult;
use deno_core::BufView;
use deno_core::RcRef;
use deno_core::Resource;
use hyper1::body::Body;
use hyper1::body::Incoming;
use hyper1::body::SizeHint;
use hyper::body::Body;
use hyper::body::Incoming;
use hyper::body::SizeHint;
use std::borrow::Cow;
use std::pin::Pin;
use std::rc::Rc;
@@ -8,9 +8,9 @@ use deno_net::raw::NetworkStream;
use deno_net::raw::NetworkStreamAddress;
use deno_net::raw::NetworkStreamListener;
use deno_net::raw::NetworkStreamType;
use hyper1::header::HOST;
use hyper1::HeaderMap;
use hyper1::Uri;
use hyper::header::HOST;
use hyper::HeaderMap;
use hyper::Uri;
use std::borrow::Cow;
use std::net::Ipv4Addr;
use std::net::SocketAddr;
@@ -14,9 +14,9 @@ use deno_core::AsyncResult;
use deno_core::BufView;
use deno_core::Resource;
use flate2::write::GzEncoder;
use hyper1::body::Frame;
use hyper1::body::SizeHint;
use hyper1::header::HeaderMap;
use hyper::body::Frame;
use hyper::body::SizeHint;
use hyper::header::HeaderMap;
use pin_project::pin_project;

/// Simplification for nested types we use for our streams. We provide a way to convert from
@@ -7,13 +7,13 @@ use deno_core::futures::ready;
use deno_core::BufView;
use deno_core::OpState;
use deno_core::ResourceId;
use http_1::request::Parts;
use hyper1::body::Body;
use hyper1::body::Frame;
use hyper1::body::Incoming;
use hyper1::body::SizeHint;
use hyper1::header::HeaderMap;
use hyper1::upgrade::OnUpgrade;
use http::request::Parts;
use hyper::body::Body;
use hyper::body::Frame;
use hyper::body::Incoming;
use hyper::body::SizeHint;
use hyper::header::HeaderMap;
use hyper::upgrade::OnUpgrade;

use scopeguard::guard;
use scopeguard::ScopeGuard;

@@ -29,8 +29,8 @@ use std::task::Context;
use std::task::Poll;
use std::task::Waker;

pub type Request = hyper1::Request<Incoming>;
pub type Response = hyper1::Response<HttpRecordResponse>;
pub type Request = hyper::Request<Incoming>;
pub type Response = hyper::Response<HttpRecordResponse>;

#[cfg(feature = "__http_tracing")]
pub static RECORD_COUNT: std::sync::atomic::AtomicUsize =

@@ -181,7 +181,7 @@ pub(crate) async fn handle_request(
  request_info: HttpConnectionProperties,
  server_state: SignallingRc<HttpServerState>, // Keep server alive for duration of this future.
  tx: tokio::sync::mpsc::Sender<Rc<HttpRecord>>,
) -> Result<Response, hyper::Error> {
) -> Result<Response, hyper_v014::Error> {
  // If the underlying TCP connection is closed, this future will be dropped
  // and execution could stop at any await point.
  // The HttpRecord must live until JavaScript is done processing so is wrapped

@@ -209,9 +209,9 @@ pub(crate) async fn handle_request(
struct HttpRecordInner {
  server_state: SignallingRc<HttpServerState>,
  request_info: HttpConnectionProperties,
  request_parts: http_1::request::Parts,
  request_parts: http::request::Parts,
  request_body: Option<RequestBodyState>,
  response_parts: Option<http_1::response::Parts>,
  response_parts: Option<http::response::Parts>,
  response_ready: bool,
  response_waker: Option<Waker>,
  response_body: ResponseBytesInner,

@@ -244,7 +244,7 @@ impl HttpRecord {
  ) -> Rc<Self> {
    let (request_parts, request_body) = request.into_parts();
    let request_body = Some(request_body.into());
    let (mut response_parts, _) = http_1::Response::new(()).into_parts();
    let (mut response_parts, _) = http::Response::new(()).into_parts();
    let record =
      if let Some((record, headers)) = server_state.borrow_mut().pool.pop() {
        response_parts.headers = headers;

@@ -425,7 +425,7 @@ impl HttpRecord {
  }

  /// Get a mutable reference to the response status and headers.
  pub fn response_parts(&self) -> RefMut<'_, http_1::response::Parts> {
  pub fn response_parts(&self) -> RefMut<'_, http::response::Parts> {
    RefMut::map(self.self_mut(), |inner| {
      inner.response_parts.as_mut().unwrap()
    })

@@ -594,18 +594,18 @@ mod tests {
  use crate::response_body::ResponseBytesInner;
  use bytes::Buf;
  use deno_net::raw::NetworkStreamType;
  use hyper1::body::Body;
  use hyper1::service::service_fn;
  use hyper1::service::HttpService;
  use hyper::body::Body;
  use hyper::service::service_fn;
  use hyper::service::HttpService;
  use hyper_util::rt::TokioIo;
  use std::error::Error as StdError;

  /// Execute client request on service and concurrently map the response.
  async fn serve_request<B, S, T, F>(
    req: http_1::Request<B>,
    req: http::Request<B>,
    service: S,
    map_response: impl FnOnce(hyper1::Response<Incoming>) -> F,
  ) -> hyper1::Result<T>
    map_response: impl FnOnce(hyper::Response<Incoming>) -> F,
  ) -> hyper::Result<T>
  where
    B: Body + Send + 'static, // Send bound due to DuplexStream
    B::Data: Send,

@@ -614,10 +614,10 @@ mod tests {
    S::Error: Into<Box<dyn StdError + Send + Sync>>,
    S::ResBody: 'static,
    <S::ResBody as Body>::Error: Into<Box<dyn StdError + Send + Sync>>,
    F: std::future::Future<Output = hyper1::Result<T>>,
    F: std::future::Future<Output = hyper::Result<T>>,
  {
    use hyper1::client::conn::http1::handshake;
    use hyper1::server::conn::http1::Builder;
    use hyper::client::conn::http1::handshake;
    use hyper::server::conn::http1::Builder;
    let (stream_client, stream_server) = tokio::io::duplex(16 * 1024);
    let conn_server =
      Builder::new().serve_connection(TokioIo::new(stream_server), service);

@@ -646,7 +646,7 @@ mod tests {
      local_port: None,
      stream_type: NetworkStreamType::Tcp,
    };
    let svc = service_fn(move |req: hyper1::Request<Incoming>| {
    let svc = service_fn(move |req: hyper::Request<Incoming>| {
      handle_request(
        req,
        request_info.clone(),

@@ -655,8 +655,7 @@ mod tests {
      )
    });

    let client_req =
      http_1::Request::builder().uri("/").body("".to_string())?;
    let client_req = http::Request::builder().uri("/").body("".to_string())?;

    // Response produced by concurrent tasks
    tokio::try_join!(
@@ -6,9 +6,9 @@ use bytes::Bytes;
use bytes::BytesMut;
use deno_core::error::AnyError;
use httparse::Status;
use hyper1::header::HeaderName;
use hyper1::header::HeaderValue;
use hyper1::Response;
use hyper::header::HeaderName;
use hyper::header::HeaderValue;
use hyper::Response;
use memmem::Searcher;
use memmem::TwoWaySearcher;
use once_cell::sync::OnceCell;

@@ -152,7 +152,7 @@ impl<T: Default> WebSocketUpgrade<T> {
#[cfg(test)]
mod tests {
  use super::*;
  use hyper::Body;
  use hyper_v014::Body;

  type ExpectedResponseAndHead = Option<(Response<Body>, &'static [u8])>;
@@ -32,10 +32,10 @@ dsa = "0.6.1"
ecb.workspace = true
elliptic-curve.workspace = true
errno = "0.2.8"
h2.workspace = true
h2 = { version = "0.3.17", features = ["unstable"] }
hex.workspace = true
hkdf.workspace = true
http.workspace = true
http_v02.workspace = true
idna = "0.3.0"
indexmap.workspace = true
k256 = "0.13.1"
@@ -25,11 +25,11 @@ use deno_net::raw::take_network_stream_resource;
use deno_net::raw::NetworkStream;
use h2;
use h2::RecvStream;
use http;
use http::request::Parts;
use http::HeaderMap;
use http::Response;
use http::StatusCode;
use http_v02;
use http_v02::request::Parts;
use http_v02::HeaderMap;
use http_v02::Response;
use http_v02::StatusCode;
use reqwest::header::HeaderName;
use reqwest::header::HeaderValue;
use url::Url;

@@ -310,7 +310,7 @@ pub async fn op_http2_client_request(
  let url = url.join(&pseudo_path)?;

  let mut req = http::Request::builder()
  let mut req = http_v02::Request::builder()
    .uri(url.as_str())
    .method(pseudo_method.as_str());

@@ -398,7 +398,7 @@ pub async fn op_http2_client_send_trailers(
    .get::<Http2ClientStream>(stream_rid)?;
  let mut stream = RcRef::map(&resource, |r| &r.stream).borrow_mut().await;

  let mut trailers_map = http::HeaderMap::new();
  let mut trailers_map = http_v02::HeaderMap::new();
  for (name, value) in trailers {
    trailers_map.insert(
      HeaderName::from_bytes(&name).unwrap(),
@@ -19,11 +19,11 @@ deno_core.workspace = true
deno_net.workspace = true
deno_tls.workspace = true
fastwebsockets.workspace = true
h2 = "0.4"
http = "1.0"
h2.workspace = true
http.workspace = true
http-body-util.workspace = true
hyper.workspace = true
hyper-util.workspace = true
hyper1.workspace = true
once_cell.workspace = true
rustls-tokio-stream.workspace = true
serde.workspace = true
@@ -891,7 +891,7 @@ pub fn get_network_error_class_name(e: &AnyError) -> Option<&'static str> {
#[derive(Clone)]
struct LocalExecutor;

impl<Fut> hyper1::rt::Executor<Fut> for LocalExecutor
impl<Fut> hyper::rt::Executor<Fut> for LocalExecutor
where
  Fut: Future + 'static,
  Fut::Output: 'static,
@@ -4,7 +4,7 @@ use bytes::Bytes;
use deno_net::raw::NetworkStream;
use h2::RecvStream;
use h2::SendStream;
use hyper1::upgrade::Upgraded;
use hyper::upgrade::Upgraded;
use hyper_util::rt::TokioIo;
use std::io::ErrorKind;
use std::pin::Pin;
@@ -102,10 +102,9 @@ filetime = "0.2.16"
fs3.workspace = true
http.workspace = true
http-body-util.workspace = true
http_1 = { package = "http", version = "1.0" }
hyper = { workspace = true, features = ["server", "stream", "http1", "http2", "runtime"] }
hyper.workspace = true
hyper-util.workspace = true
hyper1.workspace = true
hyper_v014 = { workspace = true, features = ["server", "stream", "http1", "http2", "runtime"] }
libc.workspace = true
log.workspace = true
netif = "0.1.6"
@@ -142,7 +142,7 @@ fn get_url_parse_error_class(_error: &url::ParseError) -> &'static str {
  "URIError"
}

fn get_hyper_error_class(_error: &hyper::Error) -> &'static str {
fn get_hyper_error_class(_error: &hyper_v014::Error) -> &'static str {
  "Http"
}

@@ -175,9 +175,12 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> {
      e.downcast_ref::<dlopen2::Error>()
        .map(get_dlopen_error_class)
    })
    .or_else(|| e.downcast_ref::<hyper::Error>().map(get_hyper_error_class))
    .or_else(|| {
      e.downcast_ref::<Arc<hyper::Error>>()
      e.downcast_ref::<hyper_v014::Error>()
        .map(get_hyper_error_class)
    })
    .or_else(|| {
      e.downcast_ref::<Arc<hyper_v014::Error>>()
        .map(|e| get_hyper_error_class(e))
    })
    .or_else(|| {
@@ -104,11 +104,11 @@ impl Drop for InspectorServer {
}

fn handle_ws_request(
  req: http_1::Request<hyper1::body::Incoming>,
  req: http::Request<hyper::body::Incoming>,
  inspector_map_rc: Rc<RefCell<HashMap<Uuid, InspectorInfo>>>,
) -> http_1::Result<http_1::Response<Box<http_body_util::Full<Bytes>>>> {
) -> http::Result<http::Response<Box<http_body_util::Full<Bytes>>>> {
  let (parts, body) = req.into_parts();
  let req = http_1::Request::from_parts(parts, ());
  let req = http::Request::from_parts(parts, ());

  let maybe_uuid = req
    .uri()

@@ -117,8 +117,8 @@ fn handle_ws_request(
    .and_then(|s| Uuid::parse_str(s).ok());

  if maybe_uuid.is_none() {
    return http_1::Response::builder()
      .status(http_1::StatusCode::BAD_REQUEST)
    return http::Response::builder()
      .status(http::StatusCode::BAD_REQUEST)
      .body(Box::new(Bytes::from("Malformed inspector UUID").into()));
  }

@@ -128,8 +128,8 @@ fn handle_ws_request(
  let maybe_inspector_info = inspector_map.get(&maybe_uuid.unwrap());

  if maybe_inspector_info.is_none() {
    return http_1::Response::builder()
      .status(http_1::StatusCode::NOT_FOUND)
    return http::Response::builder()
      .status(http::StatusCode::NOT_FOUND)
      .body(Box::new(Bytes::from("Invalid inspector UUID").into()));
  }

@@ -137,20 +137,20 @@ fn handle_ws_request(
    info.new_session_tx.clone()
  };
  let (parts, _) = req.into_parts();
  let mut req = http_1::Request::from_parts(parts, body);
  let mut req = http::Request::from_parts(parts, body);

  let (resp, fut) = match fastwebsockets::upgrade::upgrade(&mut req) {
    Ok((resp, fut)) => {
      let (parts, _body) = resp.into_parts();
      let resp = http_1::Response::from_parts(
      let resp = http::Response::from_parts(
        parts,
        Box::new(http_body_util::Full::new(Bytes::new())),
      );
      (resp, fut)
    }
    _ => {
      return http_1::Response::builder()
        .status(http_1::StatusCode::BAD_REQUEST)
      return http::Response::builder()
        .status(http::StatusCode::BAD_REQUEST)
        .body(Box::new(
          Bytes::from("Not a valid Websocket Request").into(),
        ));

@@ -192,7 +192,7 @@ fn handle_ws_request(
fn handle_json_request(
  inspector_map: Rc<RefCell<HashMap<Uuid, InspectorInfo>>>,
  host: Option<String>,
) -> http_1::Result<http_1::Response<Box<http_body_util::Full<Bytes>>>> {
) -> http::Result<http::Response<Box<http_body_util::Full<Bytes>>>> {
  let data = inspector_map
    .borrow()
    .values()

@@ -200,22 +200,22 @@ fn handle_json_request(
    .collect::<Vec<_>>();
  let body: http_body_util::Full<Bytes> =
    Bytes::from(serde_json::to_string(&data).unwrap()).into();
  http_1::Response::builder()
    .status(http_1::StatusCode::OK)
    .header(http_1::header::CONTENT_TYPE, "application/json")
  http::Response::builder()
    .status(http::StatusCode::OK)
    .header(http::header::CONTENT_TYPE, "application/json")
    .body(Box::new(body))
}

fn handle_json_version_request(
  version_response: Value,
) -> http_1::Result<http_1::Response<Box<http_body_util::Full<Bytes>>>> {
) -> http::Result<http::Response<Box<http_body_util::Full<Bytes>>>> {
  let body = Box::new(http_body_util::Full::from(
    serde_json::to_string(&version_response).unwrap(),
  ));

  http_1::Response::builder()
    .status(http_1::StatusCode::OK)
    .header(http_1::header::CONTENT_TYPE, "application/json")
  http::Response::builder()
    .status(http::StatusCode::OK)
    .header(http::header::CONTENT_TYPE, "application/json")
    .body(body)
}

@@ -296,8 +296,8 @@ async fn server(
    let json_version_response = json_version_response.clone();
    let mut shutdown_server_rx = shutdown_server_rx.resubscribe();

    let service = hyper1::service::service_fn(
      move |req: http_1::Request<hyper1::body::Incoming>| {
    let service = hyper::service::service_fn(
      move |req: http::Request<hyper::body::Incoming>| {
        future::ready({
          // If the host header can make a valid URL, use it
          let host = req

@@ -311,20 +311,20 @@ async fn server(
            _ => None,
          });
          match (req.method(), req.uri().path()) {
            (&http_1::Method::GET, path) if path.starts_with("/ws/") => {
            (&http::Method::GET, path) if path.starts_with("/ws/") => {
              handle_ws_request(req, Rc::clone(&inspector_map))
            }
            (&http_1::Method::GET, "/json/version") => {
            (&http::Method::GET, "/json/version") => {
              handle_json_version_request(json_version_response.clone())
            }
            (&http_1::Method::GET, "/json") => {
            (&http::Method::GET, "/json") => {
              handle_json_request(Rc::clone(&inspector_map), host)
            }
            (&http_1::Method::GET, "/json/list") => {
            (&http::Method::GET, "/json/list") => {
              handle_json_request(Rc::clone(&inspector_map), host)
            }
            _ => http_1::Response::builder()
              .status(http_1::StatusCode::NOT_FOUND)
            _ => http::Response::builder()
              .status(http::StatusCode::NOT_FOUND)
              .body(Box::new(http_body_util::Full::new(Bytes::from(
                "Not Found",
              )))),

@@ -334,7 +334,7 @@ async fn server(
    );

    deno_core::unsync::spawn(async move {
      let server = hyper1::server::conn::http1::Builder::new();
      let server = hyper::server::conn::http1::Builder::new();

      let mut conn =
        pin!(server.serve_connection(io, service).with_upgrades());

@@ -376,7 +376,7 @@ async fn server(
/// 'futures' crate, therefore they can't participate in Tokio's cooperative
/// task yielding.
async fn pump_websocket_messages(
  mut websocket: WebSocket<TokioIo<hyper1::upgrade::Upgraded>>,
  mut websocket: WebSocket<TokioIo<hyper::upgrade::Upgraded>>,
  inbound_tx: UnboundedSender<String>,
  mut outbound_rx: UnboundedReceiver<InspectorMsg>,
) {
@@ -18,7 +18,7 @@ use deno_http::HttpStreamResource;
use deno_net::io::TcpStreamResource;
use deno_net::ops_tls::TlsStream;
use deno_net::ops_tls::TlsStreamResource;
use hyper::upgrade::Parts;
use hyper_v014::upgrade::Parts;
use serde::Serialize;
use tokio::net::TcpStream;

@@ -121,7 +121,7 @@ async fn op_http_upgrade(
    }
  };

  let transport = hyper::upgrade::on(request).await?;
  let transport = hyper_v014::upgrade::on(request).await?;
  let transport = match transport.downcast::<TcpStream>() {
    Ok(Parts {
      io: tcp_stream,
@@ -25,11 +25,11 @@ fastwebsockets.workspace = true
flate2 = { workspace = true, features = ["default"] }
futures.workspace = true
glob.workspace = true
h2 = "0.4"
http = "1.0"
h2.workspace = true
http.workspace = true
http-body-util.workspace = true
hyper.workspace = true
hyper-util.workspace = true
hyper1.workspace = true
lazy-regex.workspace = true
libc.workspace = true
lsp-types.workspace = true
@@ -2,8 +2,8 @@

use futures::StreamExt;
use h2;
use hyper1::header::HeaderName;
use hyper1::header::HeaderValue;
use hyper::header::HeaderName;
use hyper::header::HeaderValue;
use rustls_tokio_stream::TlsStream;
use tokio::net::TcpStream;
use tokio::task::LocalSet;

@@ -48,7 +48,7 @@ pub async fn h2_grpc_server(h2_grpc_port: u16, h2s_grpc_port: u16) {
  }

  async fn handle_request(
    mut request: hyper1::Request<h2::RecvStream>,
    mut request: hyper::Request<h2::RecvStream>,
    mut respond: h2::server::SendResponse<bytes::Bytes>,
  ) -> Result<(), anyhow::Error> {
    let body = request.body_mut();

@@ -59,11 +59,11 @@ pub async fn h2_grpc_server(h2_grpc_port: u16, h2s_grpc_port: u16) {
    let maybe_recv_trailers = body.trailers().await?;

    let response = hyper1::Response::new(());
    let response = hyper::Response::new(());
    let mut send = respond.send_response(response, false)?;
    send.send_data(bytes::Bytes::from_static(b"hello "), false)?;
    send.send_data(bytes::Bytes::from_static(b"world\n"), false)?;
    let mut trailers = hyper1::HeaderMap::new();
    let mut trailers = hyper::HeaderMap::new();
    trailers.insert(
      HeaderName::from_static("abc"),
      HeaderValue::from_static("def"),
@@ -9,7 +9,6 @@ use http;
use http::Request;
use http::Response;
use http_body_util::combinators::UnsyncBoxBody;
use hyper1 as hyper;
use hyper_util::rt::TokioIo;
use std::convert::Infallible;
use std::io;

@@ -46,7 +45,7 @@ where
  loop {
    let (stream, _) = listener.accept().await?;
    let io = TokioIo::new(stream);
    deno_unsync::spawn(hyper1_serve_connection(
    deno_unsync::spawn(hyper_serve_connection(
      io,
      handler,
      options.error_msg,

@@ -76,7 +75,7 @@ pub async fn run_server_with_acceptor<'a, A, F, S>(
  while let Some(result) = acceptor.next().await {
    let stream = result?;
    let io = TokioIo::new(stream);
    deno_unsync::spawn(hyper1_serve_connection(
    deno_unsync::spawn(hyper_serve_connection(
      io, handler, error_msg, kind,
    ));
  }

@@ -89,7 +88,7 @@ pub async fn run_server_with_acceptor<'a, A, F, S>(
  }
}

async fn hyper1_serve_connection<I, F, S>(
async fn hyper_serve_connection<I, F, S>(
  io: I,
  handler: F,
  error_msg: &'static str,

@@ -99,7 +98,7 @@ async fn hyper1_serve_connection<I, F, S>(
  F: Fn(Request<hyper::body::Incoming>) -> S + Copy + 'static,
  S: Future<Output = HandlerOutput> + 'static,
{
  let service = hyper1::service::service_fn(handler);
  let service = hyper::service::service_fn(handler);

  let result: Result<(), anyhow::Error> = match kind {
    ServerKind::Auto => {

@@ -111,7 +110,7 @@ async fn hyper1_serve_connection<I, F, S>(
        .map_err(|e| anyhow::anyhow!("{}", e))
    }
    ServerKind::OnlyHttp1 => {
      let builder = hyper1::server::conn::http1::Builder::new();
      let builder = hyper::server::conn::http1::Builder::new();
      builder
        .serve_connection(io, service)
        .await

@@ -119,7 +118,7 @@ async fn hyper1_serve_connection<I, F, S>(
    }
    ServerKind::OnlyHttp2 => {
      let builder =
        hyper1::server::conn::http2::Builder::new(DenoUnsyncExecutor);
        hyper::server::conn::http2::Builder::new(DenoUnsyncExecutor);
      builder
        .serve_connection(io, service)
        .await
|
|||
use http_body_util::BodyExt;
|
||||
use http_body_util::Empty;
|
||||
use http_body_util::Full;
|
||||
use hyper1 as hyper;
|
||||
use pretty_assertions::assert_eq;
|
||||
use prost::Message;
|
||||
use std::collections::HashMap;
|
||||
|
|
|
@@ -7,10 +7,10 @@ use bytes::Bytes;
use http_body_util::combinators::UnsyncBoxBody;
use http_body_util::Empty;
use http_body_util::Full;
use hyper1::body::Incoming;
use hyper1::Request;
use hyper1::Response;
use hyper1::StatusCode;
use hyper::body::Incoming;
use hyper::Request;
use hyper::Response;
use hyper::StatusCode;
use serde_json::json;
use std::convert::Infallible;
use std::net::SocketAddr;
@@ -15,11 +15,11 @@ use h2::server::Handshake;
use h2::server::SendResponse;
use h2::Reason;
use h2::RecvStream;
use hyper1::upgrade::Upgraded;
use hyper1::Method;
use hyper1::Request;
use hyper1::Response;
use hyper1::StatusCode;
use hyper::upgrade::Upgraded;
use hyper::Method;
use hyper::Request;
use hyper::Response;
use hyper::StatusCode;
use hyper_util::rt::TokioIo;
use pretty_assertions::assert_eq;
use std::pin::Pin;

@@ -126,8 +126,8 @@ fn spawn_ws_server<S>(stream: S, handler: WsHandler)
where
  S: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static,
{
  let service = hyper1::service::service_fn(
    move |mut req: http::Request<hyper1::body::Incoming>| async move {
  let service = hyper::service::service_fn(
    move |mut req: http::Request<hyper::body::Incoming>| async move {
      let (response, upgrade_fut) = fastwebsockets::upgrade::upgrade(&mut req)
        .map_err(|e| anyhow!("Error upgrading websocket connection: {}", e))?;

@@ -148,7 +148,7 @@ where
  let io = TokioIo::new(stream);
  tokio::spawn(async move {
    let conn = hyper1::server::conn::http1::Builder::new()
    let conn = hyper::server::conn::http1::Builder::new()
      .serve_connection(io, service)
      .with_upgrades();