// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.

mod urlpattern;

use deno_core::error::type_error;
use deno_core::error::AnyError;
use deno_core::include_js_files;
use deno_core::op;
use deno_core::url::form_urlencoded;
use deno_core::url::quirks;
use deno_core::url::Url;
use deno_core::Extension;
use deno_core::OpState;
use deno_core::ZeroCopyBuf;
use std::path::PathBuf;

use crate::urlpattern::op_urlpattern_parse;
use crate::urlpattern::op_urlpattern_process_match_input;

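/// Builds the `deno_url` extension: registers the extension's JS sources
/// (`00_url.js`, `01_urlpattern.js`) and the URL, URLSearchParams, and
/// URLPattern ops with `deno_core`.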
pub fn init() -> Extension {
  Extension::builder(env!("CARGO_PKG_NAME"))
    .dependencies(vec!["deno_webidl"])
    .js(include_js_files!(
      prefix "deno:ext/url",
      "00_url.js",
      "01_urlpattern.js",
    ))
    .ops(vec![
      op_url_reparse::decl(),
      op_url_parse::decl(),
      op_url_get_serialization::decl(),
      op_url_parse_with_base::decl(),
      op_url_parse_search_params::decl(),
      op_url_stringify_search_params::decl(),
      op_urlpattern_parse::decl(),
      op_urlpattern_process_match_input::decl(),
    ])
    .build()
}

/// Parse `href` with a `base_href`. Fills the out-param `buf` with URL components.
#[op]
pub fn op_url_parse_with_base(
  state: &mut OpState,
  href: String,
  base_href: String,
  buf: &mut [u8],
) -> u32 {
  let base_url = match Url::parse(&base_href) {
    Ok(url) => url,
    Err(_) => return ParseStatus::Err as u32,
  };
  parse_url(state, href, Some(&base_url), buf)
}

#[repr(u32)]
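/// Status codes returned to JS by the URL parsing ops. `Ok` means the input
/// round-trips unchanged, `OkSerialization` means the canonical serialization
/// differs from the input and must be fetched with `op_url_get_serialization`,
/// and `Err` means the input could not be parsed as a URL.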
pub enum ParseStatus {
  Ok = 0,
  OkSerialization = 1,
  Err,
}

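/// Re-serialized URL stored in `OpState` by `parse_url` and `op_url_reparse`
/// when the canonical form differs from the input; retrieved by
/// `op_url_get_serialization`.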
struct UrlSerialization(String);

#[op]
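/// Takes the pending `UrlSerialization` out of `OpState`. Only meaningful
/// right after an op returned `ParseStatus::OkSerialization`; calling it when
/// no serialization is stored will panic because the value is missing from
/// `OpState`.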
pub fn op_url_get_serialization(state: &mut OpState) -> String {
  state.take::<UrlSerialization>().0
}

/// Parse `href` without a `base_url`. Fills the out-param `buf` with URL components.
#[op]
pub fn op_url_parse(state: &mut OpState, href: String, buf: &mut [u8]) -> u32 {
  parse_url(state, href, None, buf)
}

/// `op_url_parse` and `op_url_parse_with_base` share the same implementation.
///
/// This function is used to parse the URL and fill the `buf` with internal
/// offset values of the URL components.
///
/// If the serialized URL is the same as the input URL, `UrlSerialization` is
/// not set and `ParseStatus::Ok` is returned.
///
/// If the serialized URL differs from the input URL, `UrlSerialization` is set
/// and `ParseStatus::OkSerialization` is returned. The JS side should check
/// the status and use `op_url_get_serialization` to get the serialized URL.
///
/// If the URL is invalid, `UrlSerialization` is not set and `ParseStatus::Err`
/// is returned.
///
/// ```js
/// const buf = new Uint32Array(8);
/// const status = op_url_parse("http://example.com", buf.buffer);
/// let serializedUrl = "";
/// if (status === ParseStatus.Ok) {
///   serializedUrl = "http://example.com";
/// } else if (status === ParseStatus.OkSerialization) {
///   serializedUrl = op_url_get_serialization();
/// }
/// ```
#[inline]
fn parse_url(
  state: &mut OpState,
  href: String,
  base_href: Option<&Url>,
  buf: &mut [u8],
) -> u32 {
  match Url::options().base_url(base_href).parse(&href) {
    Ok(url) => {
      let inner_url = quirks::internal_components(&url);

      let buf: &mut [u32] = as_u32_slice(buf);
      buf[0] = inner_url.scheme_end;
      buf[1] = inner_url.username_end;
      buf[2] = inner_url.host_start;
      buf[3] = inner_url.host_end;
      buf[4] = inner_url.port.map(|p| p as u32).unwrap_or(NO_PORT);
      buf[5] = inner_url.path_start;
      buf[6] = inner_url.query_start.unwrap_or(0);
      buf[7] = inner_url.fragment_start.unwrap_or(0);
      let serialization: String = url.into();
      if serialization != href {
        state.put(UrlSerialization(serialization));
        ParseStatus::OkSerialization as u32
      } else {
        ParseStatus::Ok as u32
      }
    }
    Err(_) => ParseStatus::Err as u32,
  }
}

#[derive(Eq, PartialEq, Debug)]
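/// Setters supported by `op_url_reparse`. The discriminants are part of the op
/// contract: JS passes the setter as a raw `u8` and the op transmutes it back,
/// so these values are expected to stay in sync with the JS side.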
#[repr(u8)]
pub enum UrlSetter {
  Hash = 0,
  Host = 1,
  Hostname = 2,
  Password = 3,
  Pathname = 4,
  Port = 5,
  Protocol = 6,
  Search = 7,
  Username = 8,
}

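/// Sentinel written into component slot 4 (`buf[4]`) when the URL has no port;
/// one past the largest valid port number (`u16::MAX as u32 + 1`).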
const NO_PORT: u32 = 65536;
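
/// Reinterprets a byte buffer as a `&mut [u32]` so URL component offsets can
/// be written directly into the buffer shared with JS. Panics if the length
/// is not a multiple of 4.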
fn as_u32_slice(slice: &mut [u8]) -> &mut [u32] {
  assert_eq!(slice.len() % std::mem::size_of::<u32>(), 0);
  // SAFETY: size is multiple of 4
  unsafe {
    std::slice::from_raw_parts_mut(
      slice.as_mut_ptr() as *mut u32,
      slice.len() / std::mem::size_of::<u32>(),
    )
  }
}

#[op]
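/// Applies a single `UrlSetter` to `href` and refills `buf` with the updated
/// component offsets, using the same `ParseStatus` protocol as `parse_url`.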
|
2021-08-18 17:21:33 -04:00
|
|
|
pub fn op_url_reparse(
  state: &mut OpState,
  href: String,
  setter: u8,
  setter_value: String,
  buf: &mut [u8],
) -> u32 {
  let mut url = match Url::options().parse(&href) {
    Ok(url) => url,
    Err(_) => return ParseStatus::Err as u32,
  };

  if setter > 8 {
    return ParseStatus::Err as u32;
  }
  // SAFETY: checked to be less than 9.
  let setter = unsafe { std::mem::transmute::<u8, UrlSetter>(setter) };
  let value = setter_value.as_ref();
  let e = match setter {
    UrlSetter::Hash => {
      quirks::set_hash(&mut url, value);
      Ok(())
    }
    UrlSetter::Host => quirks::set_host(&mut url, value),
    UrlSetter::Hostname => quirks::set_hostname(&mut url, value),
    UrlSetter::Password => quirks::set_password(&mut url, value),
    UrlSetter::Pathname => {
      quirks::set_pathname(&mut url, value);
      Ok(())
    }
    UrlSetter::Port => quirks::set_port(&mut url, value),
    UrlSetter::Protocol => quirks::set_protocol(&mut url, value),
    UrlSetter::Search => {
      quirks::set_search(&mut url, value);
      Ok(())
    }
    UrlSetter::Username => quirks::set_username(&mut url, value),
  };

  match e {
    Ok(_) => {
      let inner_url = quirks::internal_components(&url);

      let buf: &mut [u32] = as_u32_slice(buf);
      buf[0] = inner_url.scheme_end;
      buf[1] = inner_url.username_end;
      buf[2] = inner_url.host_start;
      buf[3] = inner_url.host_end;
      buf[4] = inner_url.port.map(|p| p as u32).unwrap_or(NO_PORT);
      buf[5] = inner_url.path_start;
      buf[6] = inner_url.query_start.unwrap_or(0);
      buf[7] = inner_url.fragment_start.unwrap_or(0);
      let serialization: String = url.into();
      if serialization != href {
        state.put(UrlSerialization(serialization));
        ParseStatus::OkSerialization as u32
      } else {
        ParseStatus::Ok as u32
      }
    }
    Err(_) => ParseStatus::Err as u32,
  }
}

#[op]
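/// Parses `application/x-www-form-urlencoded` input into key/value pairs.
/// Exactly one of `args` (a string) or `zero_copy` (raw bytes) must be
/// provided; any other combination is a type error.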
pub fn op_url_parse_search_params(
  args: Option<String>,
  zero_copy: Option<ZeroCopyBuf>,
) -> Result<Vec<(String, String)>, AnyError> {
  let params = match (args, zero_copy) {
    (None, Some(zero_copy)) => form_urlencoded::parse(&zero_copy)
      .into_iter()
      .map(|(k, v)| (k.as_ref().to_owned(), v.as_ref().to_owned()))
      .collect(),
    (Some(args), None) => form_urlencoded::parse(args.as_bytes())
      .into_iter()
      .map(|(k, v)| (k.as_ref().to_owned(), v.as_ref().to_owned()))
      .collect(),
    _ => return Err(type_error("invalid parameters")),
  };
  Ok(params)
}

#[op]
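/// Serializes key/value pairs into an `application/x-www-form-urlencoded`
/// string, e.g. `[("a", "b c")]` becomes `"a=b+c"`.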
pub fn op_url_stringify_search_params(args: Vec<(String, String)>) -> String {
  let search = form_urlencoded::Serializer::new(String::new())
    .extend_pairs(args)
    .finish();
  search
}

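/// Returns the path to the TypeScript declaration file (`lib.deno_url.d.ts`)
/// shipped alongside this crate.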
pub fn get_declaration() -> PathBuf {
  PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_url.d.ts")
}