mirror of https://github.com/denoland/deno.git
synced 2024-12-18 13:22:55 -05:00

refactor: use capacity_builder for writing deno compile data section (#27393)
parent 95928c46eb
commit 59f263409e

4 changed files with 56 additions and 58 deletions

Cargo.lock (generated, 5 lines changed)

@@ -670,9 +670,9 @@ checksum = "1bf2a5fb3207c12b5d208ebc145f967fea5cac41a021c37417ccc31ba40f39ee"
 
 [[package]]
 name = "capacity_builder"
-version = "0.1.0"
+version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2c0f637033edd76ceb881faaee372868a383f0ed7a4a59e8fdf90db2502f3d3"
+checksum = "58ec49028cb308564429cd8fac4ef21290067a0afe8f5955330a8d487d0d790c"
 dependencies = [
  "itoa",
 ]

@@ -1224,6 +1224,7 @@ dependencies = [
  "boxed_error",
  "bytes",
  "cache_control",
+ "capacity_builder",
  "chrono",
  "clap",
  "clap_complete",

Cargo.toml

@@ -108,7 +108,7 @@ boxed_error = "0.2.3"
 brotli = "6.0.0"
 bytes = "1.4.0"
 cache_control = "=0.2.0"
-capacity_builder = "0.1.0"
+capacity_builder = "0.1.3"
 cbc = { version = "=0.1.2", features = ["alloc"] }
 # Note: Do not use the "clock" feature of chrono, as it links us to CoreFoundation on macOS.
 # Instead use util::time::utc_now()

cli/Cargo.toml

@@ -97,6 +97,7 @@ bincode = "=1.3.3"
 boxed_error.workspace = true
 bytes.workspace = true
 cache_control.workspace = true
+capacity_builder.workspace = true
 chrono = { workspace = true, features = ["now"] }
 clap = { version = "=4.5.16", features = ["env", "string", "wrap_help", "error-context"] }
 clap_complete = "=4.5.24"

cli/standalone/serialization.rs

@@ -1,6 +1,7 @@
 // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
 
 use std::borrow::Cow;
+use std::cell::Cell;
 use std::collections::BTreeMap;
 use std::collections::HashMap;
 use std::io::Write;

@@ -42,50 +43,46 @@ pub fn serialize_binary_data_section(
   remote_modules: &RemoteModulesStoreBuilder,
   vfs: &BuiltVfs,
 ) -> Result<Vec<u8>, AnyError> {
-  fn write_bytes_with_len(bytes: &mut Vec<u8>, data: &[u8]) {
-    bytes.extend_from_slice(&(data.len() as u64).to_le_bytes());
-    bytes.extend_from_slice(data);
-  }
+  let metadata = serde_json::to_string(metadata)?;
+  let npm_snapshot =
+    npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
+  let remote_modules_len = Cell::new(0_u64);
+  let serialized_vfs = serde_json::to_string(&vfs.root)?;
 
-  let mut bytes = Vec::new();
-  bytes.extend_from_slice(MAGIC_BYTES);
-
-  // 1. Metadata
-  {
-    let metadata = serde_json::to_string(metadata)?;
-    write_bytes_with_len(&mut bytes, metadata.as_bytes());
-  }
-  // 2. Npm snapshot
-  {
-    let npm_snapshot =
-      npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default();
-    write_bytes_with_len(&mut bytes, &npm_snapshot);
-  }
-  // 3. Remote modules
-  {
-    let update_index = bytes.len();
-    bytes.extend_from_slice(&(0_u64).to_le_bytes());
-    let start_index = bytes.len();
-    remote_modules.write(&mut bytes)?;
-    let length = bytes.len() - start_index;
-    let length_bytes = (length as u64).to_le_bytes();
-    bytes[update_index..update_index + length_bytes.len()]
-      .copy_from_slice(&length_bytes);
-  }
-  // 4. VFS
-  {
-    let serialized_vfs = serde_json::to_string(&vfs.root)?;
-    write_bytes_with_len(&mut bytes, serialized_vfs.as_bytes());
-    let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::<u64>();
-    bytes.extend_from_slice(&vfs_bytes_len.to_le_bytes());
-    for file in &vfs.files {
-      bytes.extend_from_slice(file);
-    }
-  }
+  let bytes = capacity_builder::BytesBuilder::build(|builder| {
+    builder.append(MAGIC_BYTES);
+    // 1. Metadata
+    {
+      builder.append_le(metadata.len() as u64);
+      builder.append(&metadata);
+    }
+    // 2. Npm snapshot
+    {
+      builder.append_le(npm_snapshot.len() as u64);
+      builder.append(&npm_snapshot);
+    }
+    // 3. Remote modules
+    {
+      builder.append_le(remote_modules_len.get()); // this will be properly initialized on the second pass
+      let start_index = builder.len();
+      remote_modules.write(builder);
+      remote_modules_len.set((builder.len() - start_index) as u64);
+    }
+    // 4. VFS
+    {
+      builder.append_le(serialized_vfs.len() as u64);
+      builder.append(&serialized_vfs);
+      let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::<u64>();
+      builder.append_le(vfs_bytes_len);
+      for file in &vfs.files {
+        builder.append(file);
+      }
+    }
 
-  // write the magic bytes at the end so we can use it
-  // to make sure we've deserialized correctly
-  bytes.extend_from_slice(MAGIC_BYTES);
+    // write the magic bytes at the end so we can use it
+    // to make sure we've deserialized correctly
+    builder.append(MAGIC_BYTES);
+  })?;
 
   Ok(bytes)
 }

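The Cell is what makes this refactor work: judging by the "second pass" comment above, BytesBuilder::build runs the closure twice, once to measure the exact capacity and once to write, so a length that is only known after a sub-section has been appended can be recorded on the first pass and emitted correctly on the second. Below is a minimal sketch of that pattern, assuming only the capacity_builder 0.1.x calls that appear in this diff (build, append, append_le, len); the unwrap is illustrative, not the crate's prescribed error handling.

// Sketch of the two-pass length-prefix pattern used above (assumptions noted
// in the comments; this is not code from the repo).
use std::cell::Cell;

fn length_prefixed(payload: &[u8]) -> Vec<u8> {
  let payload_len = Cell::new(0_u64);
  capacity_builder::BytesBuilder::build(|builder| {
    // First pass: the Cell still holds 0, but a u64 placeholder is always
    // 8 bytes, so the measured capacity is already exact. Second pass: the
    // value recorded at the end of pass one is what actually gets written.
    builder.append_le(payload_len.get());
    let start_index = builder.len();
    builder.append(payload);
    payload_len.set((builder.len() - start_index) as u64);
  })
  .unwrap()
}

Because the placeholder is fixed-width, the capacity computed on the measuring pass is identical on the writing pass even though the value changes, which is the invariant the whole trick depends on.
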
@@ -191,26 +188,25 @@ impl RemoteModulesStoreBuilder {
     }
   }
 
-  fn write(&self, writer: &mut dyn Write) -> Result<(), AnyError> {
-    writer.write_all(&(self.specifiers.len() as u32).to_le_bytes())?;
-    writer.write_all(&(self.redirects.len() as u32).to_le_bytes())?;
+  fn write<'a>(&'a self, builder: &mut capacity_builder::BytesBuilder<'a>) {
+    builder.append_le(self.specifiers.len() as u32);
+    builder.append_le(self.redirects.len() as u32);
     for (specifier, offset) in &self.specifiers {
-      writer.write_all(&(specifier.len() as u32).to_le_bytes())?;
-      writer.write_all(specifier.as_bytes())?;
-      writer.write_all(&offset.to_le_bytes())?;
+      builder.append_le(specifier.len() as u32);
+      builder.append(specifier.as_bytes());
+      builder.append_le(*offset);
     }
     for (from, to) in &self.redirects {
-      writer.write_all(&(from.len() as u32).to_le_bytes())?;
-      writer.write_all(from.as_bytes())?;
-      writer.write_all(&(to.len() as u32).to_le_bytes())?;
-      writer.write_all(to.as_bytes())?;
+      builder.append_le(from.len() as u32);
+      builder.append(from);
+      builder.append_le(to.len() as u32);
+      builder.append(to);
    }
     for (media_type, data) in &self.data {
-      writer.write_all(&[serialize_media_type(*media_type)])?;
-      writer.write_all(&(data.len() as u64).to_le_bytes())?;
-      writer.write_all(data)?;
+      builder.append(serialize_media_type(*media_type));
+      builder.append_le(data.len() as u64);
+      builder.append(data);
     }
-    Ok(())
   }
 }
 
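Every variable-length field above is emitted as an explicit little-endian length followed by the raw bytes, so a reader can walk these records with no framing beyond the prefixes themselves. A hypothetical decoder for one such record (the u32-prefixed strings used for specifiers and redirects), shown only to illustrate the layout; it is not the repo's actual deserializer:

// Hypothetical helper, not code from this repo: reads one record of the
// shape written above (u32 little-endian length, then that many UTF-8
// bytes), returning the string plus the remaining input.
fn read_len_prefixed(input: &[u8]) -> Option<(&str, &[u8])> {
  let len_bytes: [u8; 4] = input.get(..4)?.try_into().ok()?;
  let len = u32::from_le_bytes(len_bytes) as usize;
  let rest = &input[4..];
  let text = std::str::from_utf8(rest.get(..len)?).ok()?;
  Some((text, &rest[len..]))
}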