Mirror of https://github.com/denoland/deno.git

chore: update to Rust 1.75 (#21731)

Author: 林炳权, 2024-01-02 06:22:48 +08:00, committed by GitHub
Commit: 96b581bdd2 (parent: 7e72f3af61)
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
17 changed files with 25 additions and 31 deletions


@@ -5,7 +5,7 @@ import * as yaml from "https://deno.land/std@0.173.0/encoding/yaml.ts";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 65;
+const cacheVersion = 66;
 const ubuntuRunner = "ubuntu-22.04";
 const ubuntuXlRunner = "ubuntu-22.04-xl";


@@ -329,8 +329,8 @@ jobs:
 path: |-
 ~/.cargo/registry/index
 ~/.cargo/registry/cache
-key: '65-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}'
-restore-keys: '65-cargo-home-${{ matrix.os }}'
+key: '66-cargo-home-${{ matrix.os }}-${{ hashFiles(''Cargo.lock'') }}'
+restore-keys: '66-cargo-home-${{ matrix.os }}'
 if: '!(matrix.skip)'
 - name: Restore cache build output (PR)
 uses: actions/cache/restore@v3
@@ -342,7 +342,7 @@ jobs:
 !./target/*/*.zip
 !./target/*/*.tar.gz
 key: never_saved
-restore-keys: '65-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-'
+restore-keys: '66-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-'
 - name: Apply and update mtime cache
 if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
 uses: ./.github/mtime_cache
@@ -639,7 +639,7 @@ jobs:
 !./target/*/gn_out
 !./target/*/*.zip
 !./target/*/*.tar.gz
-key: '65-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+key: '66-cargo-target-${{ matrix.os }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
 publish-canary:
 name: publish canary
 runs-on: ubuntu-22.04


@@ -17,9 +17,7 @@ use deno_semver::npm::NpmPackageReqReference;
 use indexmap::IndexMap;
 pub use deno_config::BenchConfig;
-pub use deno_config::CompilerOptions;
 pub use deno_config::ConfigFile;
-pub use deno_config::EmitConfigOptions;
 pub use deno_config::FilesConfig;
 pub use deno_config::FmtOptionsConfig;
 pub use deno_config::JsxImportSourceConfig;

cli/cache/mod.rs (vendored)

@@ -45,7 +45,6 @@ pub use disk_cache::DiskCache;
 pub use emit::EmitCache;
 pub use incremental::IncrementalCache;
 pub use module_info::ModuleInfoCache;
-pub use module_info::ModuleInfoCacheModuleAnalyzer;
 pub use node::NodeAnalysisCache;
 pub use parsed_source::ParsedSourceCache;
@@ -95,7 +94,6 @@ pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache<RealDenoCacheEnv>;
 pub type LocalHttpCache = deno_cache_dir::LocalHttpCache<RealDenoCacheEnv>;
 pub type LocalLspHttpCache =
 deno_cache_dir::LocalLspHttpCache<RealDenoCacheEnv>;
-pub use deno_cache_dir::CachedUrlMetadata;
 pub use deno_cache_dir::HttpCache;
 /// A "wrapper" for the FileFetcher and DiskCache for the Deno CLI that provides


@@ -423,11 +423,11 @@ fn fix_ts_import_action(
 if action.fix_name == "import" {
 let change = action
 .changes
-.get(0)
+.first()
 .ok_or_else(|| anyhow!("Unexpected action changes."))?;
 let text_change = change
 .text_changes
-.get(0)
+.first()
 .ok_or_else(|| anyhow!("Missing text change."))?;
 if let Some(captures) = IMPORT_SPECIFIER_RE.captures(&text_change.new_text)
 {
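Most of the Rust source changes in this commit are this same mechanical swap of .get(0) for .first(), the pattern flagged by clippy's get_first lint. As a minimal, self-contained sketch (not taken from the diff), the two calls are equivalent for slices and Vec, but .first() states the intent without a magic index:

fn main() {
    let args = vec!["deno", "run", "main.ts"];

    let via_get = args.get(0); // clippy's get_first lint warns on this form
    let via_first = args.first(); // preferred spelling, same Option<&&str> result

    assert_eq!(via_get, via_first);
    println!("first arg: {:?}", via_first);
}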


@@ -98,7 +98,7 @@ impl DenoTestCollector {
 }
 fn check_call_expr(&mut self, node: &ast::CallExpr, range: &SourceRange) {
-if let Some(expr) = node.args.get(0).map(|es| es.expr.as_ref()) {
+if let Some(expr) = node.args.first().map(|es| es.expr.as_ref()) {
 match expr {
 ast::Expr::Object(obj_lit) => {
 for prop in &obj_lit.props {


@@ -932,7 +932,7 @@ impl Config {
 }
 pub fn root_uri(&self) -> Option<&Url> {
-self.workspace_folders.get(0).map(|p| &p.0)
+self.workspace_folders.first().map(|p| &p.0)
 }
 pub fn maybe_node_modules_dir_path(&self) -> Option<&PathBuf> {
@@ -1184,7 +1184,7 @@ fn specifier_enabled(
 return true;
 };
 let (settings, mut folder_uri) = settings.get_for_specifier(specifier);
-folder_uri = folder_uri.or_else(|| workspace_folders.get(0).map(|f| &f.0));
+folder_uri = folder_uri.or_else(|| workspace_folders.first().map(|f| &f.0));
 let mut disable_paths = vec![];
 let mut enable_paths = None;
 if let Some(folder_uri) = folder_uri {


@@ -23,7 +23,7 @@ fn visit_arrow(
 test_module: &mut TestModule,
 ) {
 if let Some((maybe_test_context, maybe_step_var)) =
-parse_test_context_param(arrow_expr.params.get(0))
+parse_test_context_param(arrow_expr.params.first())
 {
 let mut collector = TestStepCollector::new(
 maybe_test_context,
@@ -44,7 +44,7 @@ fn visit_fn(
 test_module: &mut TestModule,
 ) {
 if let Some((maybe_test_context, maybe_step_var)) =
-parse_test_context_param(function.params.get(0).map(|p| &p.pat))
+parse_test_context_param(function.params.first().map(|p| &p.pat))
 {
 let mut collector = TestStepCollector::new(
 maybe_test_context,
@@ -136,7 +136,7 @@ fn visit_call_expr(
 text_info: &SourceTextInfo,
 test_module: &mut TestModule,
 ) {
-if let Some(expr) = node.args.get(0).map(|es| es.expr.as_ref()) {
+if let Some(expr) = node.args.first().map(|es| es.expr.as_ref()) {
 match expr {
 ast::Expr::Object(obj_lit) => {
 let mut maybe_name = None;


@@ -4481,7 +4481,7 @@ fn test_lsp_code_actions_ordering() {
 let action = action.as_object_mut().unwrap();
 let title = action.get("title").unwrap().as_str().unwrap().to_string();
 let diagnostics = action.get("diagnostics").unwrap().as_array().unwrap();
-let diagnostic = diagnostics.get(0).unwrap().as_object().unwrap();
+let diagnostic = diagnostics.first().unwrap().as_object().unwrap();
 let source = diagnostic.get("source").unwrap();
 let source = source.as_str().unwrap().to_string();
 action.clear();


@@ -347,7 +347,7 @@ fn merge_range_tree_children<'a>(
 let mut result: Vec<&'a mut RangeTree<'a>> = Vec::new();
 for event in events.iter() {
 let mut matching_trees: Vec<&'a mut RangeTree<'a>> = Vec::new();
-for (_parent_index, children) in child_forests.iter_mut().enumerate() {
+for children in child_forests.iter_mut() {
 let next_tree: Option<&'a mut RangeTree<'a>> = {
 if children
 .peek()
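Here the tuple index produced by .enumerate() was bound to _parent_index and never used, so the adaptor is dropped and the loop iterates the mutable references directly (presumably surfaced by the clippy bundled with the newer toolchain, e.g. its unused_enumerate_index lint). A reduced sketch with plain vectors instead of the RangeTree forests used above:

fn main() {
    let mut forests = vec![vec![1, 2], vec![3], vec![4, 5, 6]];

    // Old shape: the index is requested but ignored.
    for (_i, children) in forests.iter_mut().enumerate() {
        children.push(0);
    }

    // New shape: iterate the mutable references directly.
    for children in forests.iter_mut() {
        children.push(0);
    }

    assert_eq!(forests[1], vec![3, 0, 0]);
}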


@@ -364,7 +364,6 @@ fn generate_coverage_report(
 line_counts
 .into_iter()
 .enumerate()
-.map(|(index, count)| (index, count))
 .collect::<Vec<(usize, i64)>>()
 };
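The removed .map(|(index, count)| (index, count)) rebuilds each (usize, i64) pair unchanged, so deleting it leaves the collected Vec<(usize, i64)> identical; recent clippy versions report this shape under the map_identity lint. A reduced sketch with placeholder line counts:

fn main() {
    let line_counts: Vec<i64> = vec![3, 0, 7];

    // Old shape: enumerate() already yields (usize, i64); the map is an identity.
    let with_map: Vec<(usize, i64)> = line_counts
        .clone()
        .into_iter()
        .enumerate()
        .map(|(index, count)| (index, count))
        .collect();

    // New shape: same result without the redundant adaptor.
    let without_map: Vec<(usize, i64)> = line_counts.into_iter().enumerate().collect();

    assert_eq!(with_map, without_map);
}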


@@ -255,7 +255,7 @@ impl ShellCommand for NpxCommand {
 &self,
 mut context: ShellCommandContext,
 ) -> LocalBoxFuture<'static, ExecuteResult> {
-if let Some(first_arg) = context.args.get(0).cloned() {
+if let Some(first_arg) = context.args.first().cloned() {
 if let Some(command) = context.state.resolve_command(&first_arg) {
 let context = ShellCommandContext {
 args: context.args.iter().skip(1).cloned().collect::<Vec<_>>(),


@@ -50,7 +50,6 @@ mod diagnostics;
 pub use self::diagnostics::Diagnostic;
 pub use self::diagnostics::DiagnosticCategory;
-pub use self::diagnostics::DiagnosticMessageChain;
 pub use self::diagnostics::Diagnostics;
 pub use self::diagnostics::Position;


@@ -563,8 +563,8 @@ fn import_key_ec_jwk(
 // Import using ring, to validate key
 let key_alg = match named_curve {
-EcNamedCurve::P256 => CryptoNamedCurve::P256.try_into()?,
-EcNamedCurve::P384 => CryptoNamedCurve::P256.try_into()?,
+EcNamedCurve::P256 => CryptoNamedCurve::P256.into(),
+EcNamedCurve::P384 => CryptoNamedCurve::P256.into(),
 EcNamedCurve::P521 => {
 return Err(data_error("Unsupported named curve"))
 }
@@ -667,8 +667,8 @@ fn import_key_ec(
 // 10.
 if let Some(pk_named_curve) = pk_named_curve {
 let signing_alg = match pk_named_curve {
-EcNamedCurve::P256 => CryptoNamedCurve::P256.try_into()?,
-EcNamedCurve::P384 => CryptoNamedCurve::P384.try_into()?,
+EcNamedCurve::P256 => CryptoNamedCurve::P256.into(),
+EcNamedCurve::P384 => CryptoNamedCurve::P384.into(),
 EcNamedCurve::P521 => {
 return Err(data_error("Unsupported named curve"))
 }
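These call sites (and the two op_crypto hunks below) drop try_into()? in favor of into(), which suggests the conversion from the named-curve enum to the ring algorithm type became infallible, i.e. a From impl instead of only a TryFrom. A generic sketch of that call-site difference, using stand-in types rather than the CryptoNamedCurve and EcdsaSigningAlgorithm types in the diff (assumes the 2021 edition, where TryInto is in the prelude):

#[derive(Clone, Copy)]
enum NamedCurve { P256, P384 }

#[derive(Debug, PartialEq)]
struct SigningAlg(&'static str);

// An infallible From impl; the standard library's blanket impl then provides
// TryFrom<NamedCurve> for SigningAlg (with Error = Infallible) for free.
impl From<NamedCurve> for SigningAlg {
    fn from(curve: NamedCurve) -> Self {
        match curve {
            NamedCurve::P256 => SigningAlg("ECDSA_P256_SHA256"),
            NamedCurve::P384 => SigningAlg("ECDSA_P384_SHA384"),
        }
    }
}

fn main() -> Result<(), std::convert::Infallible> {
    // Old call-site shape: fallible conversion plus `?`.
    let via_try: SigningAlg = NamedCurve::P256.try_into()?;
    // New call-site shape: the conversion cannot fail, so `.into()` suffices.
    let via_into: SigningAlg = NamedCurve::P256.into();
    assert_eq!(via_try, via_into);
    Ok(())
}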


@@ -266,7 +266,7 @@ pub async fn op_crypto_sign_key(
 }
 Algorithm::Ecdsa => {
 let curve: &EcdsaSigningAlgorithm =
-args.named_curve.ok_or_else(not_supported)?.try_into()?;
+args.named_curve.ok_or_else(not_supported)?.into();
 let rng = RingRand::SystemRandom::new();
 let key_pair = EcdsaKeyPair::from_pkcs8(curve, &args.key.data, &rng)?;
@@ -387,9 +387,9 @@ pub async fn op_crypto_verify_key(
 }
 Algorithm::Ecdsa => {
 let signing_alg: &EcdsaSigningAlgorithm =
-args.named_curve.ok_or_else(not_supported)?.try_into()?;
+args.named_curve.ok_or_else(not_supported)?.into();
 let verify_alg: &EcdsaVerificationAlgorithm =
-args.named_curve.ok_or_else(not_supported)?.try_into()?;
+args.named_curve.ok_or_else(not_supported)?.into();
 let private_key;


@@ -706,7 +706,7 @@ mod deprecated {
 let env = run_args.env;
 let cwd = run_args.cwd;
-let mut c = Command::new(args.get(0).unwrap());
+let mut c = Command::new(args.first().unwrap());
 (1..args.len()).for_each(|i| {
 let arg = args.get(i).unwrap();
 c.arg(arg);


@@ -1,3 +1,3 @@
 [toolchain]
-channel = "1.74.1"
+channel = "1.75.0"
 components = ["rustfmt", "clippy"]