1
0
Fork 0
mirror of https://github.com/denoland/deno.git synced 2024-11-21 15:04:11 -05:00

feat(publish): provenance attestation (#22573)

Supply chain security for JSR.

```
$ deno publish --provenance

Successfully published @divy/test_provenance@0.0.3
Provenance transparency log available at https://search.sigstore.dev/?logIndex=73657418
```

0. Package has been published.
1. Fetches the version manifest and verifies it's matching with uploaded
files and exports.
2. Builds the SLSA attestation payload using the GitHub Actions env.
3. Creates an ephemeral key pair for signing the GitHub token
(aud=sigstore) and the DSSE pre-authentication encoding (PAE) tag.
4. Requests a X.509 signing certificate from Fulcio using the challenge
and ephemeral public key PEM.
5. Prepares a DSSE envelope for Rekor to witness. Posts an intoto entry
to Rekor and gets back the transparency log index.
6. Builds the provenance bundle and posts it to JSR.
This commit is contained in:
Divy Srivastava 2024-02-28 07:58:02 +05:30 committed by GitHub
parent e9fe71acb5
commit 9b5d2f8c1b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 1164 additions and 4 deletions

3
Cargo.lock generated
View file

@ -1087,6 +1087,7 @@ dependencies = [
"once_cell",
"open",
"os_pipe",
"p256",
"percent-encoding",
"phf 0.11.2",
"pin-project",
@ -1094,6 +1095,7 @@ dependencies = [
"quick-junit",
"rand",
"regex",
"reqwest",
"ring",
"rustyline",
"rustyline-derive",
@ -1102,6 +1104,7 @@ dependencies = [
"serde_repr",
"sha2",
"shell-escape",
"spki",
"tar",
"tempfile",
"test_server",

View file

@ -120,12 +120,14 @@ notify.workspace = true
once_cell.workspace = true
open = "5.0.1"
os_pipe.workspace = true
p256.workspace = true
percent-encoding.workspace = true
phf.workspace = true
pin-project.workspace = true
quick-junit = "^0.3.5"
rand = { workspace = true, features = ["small_rng"] }
regex.workspace = true
reqwest.workspace = true
ring.workspace = true
rustyline.workspace = true
rustyline-derive = "=0.7.0"
@ -133,6 +135,7 @@ serde.workspace = true
serde_repr.workspace = true
sha2.workspace = true
shell-escape = "=0.1.5"
spki = { version = "0.7", features = ["pem"] }
tar.workspace = true
tempfile.workspace = true
text-size = "=1.1.0"

View file

@ -302,6 +302,7 @@ pub struct PublishFlags {
pub token: Option<String>,
pub dry_run: bool,
pub allow_slow_types: bool,
pub provenance: bool,
}
#[derive(Clone, Debug, Eq, PartialEq)]
@ -2395,6 +2396,12 @@ fn publish_subcommand() -> Command {
.help("Allow publishing with slow types")
.action(ArgAction::SetTrue),
)
.arg(
Arg::new("provenance")
.long("provenance")
.help("From CI/CD system, publicly links the package to where it was built and published from.")
.action(ArgAction::SetTrue)
)
.arg(check_arg(/* type checks by default */ true))
.arg(no_check_arg())
})
@ -3835,6 +3842,7 @@ fn publish_parse(flags: &mut Flags, matches: &mut ArgMatches) {
token: matches.remove_one("token"),
dry_run: matches.get_flag("dry-run"),
allow_slow_types: matches.get_flag("allow-slow-types"),
provenance: matches.get_flag("provenance"),
});
}
@ -8565,6 +8573,26 @@ mod tests {
token: Some("asdf".to_string()),
dry_run: true,
allow_slow_types: true,
provenance: false,
}),
type_check_mode: TypeCheckMode::Local,
..Flags::default()
}
);
}
#[test]
fn publish_provenance_args() {
let r =
flags_from_vec(svec!["deno", "publish", "--provenance", "--token=asdf",]);
assert_eq!(
r.unwrap(),
Flags {
subcommand: DenoSubcommand::Publish(PublishFlags {
token: Some("asdf".to_string()),
dry_run: false,
allow_slow_types: false,
provenance: true,
}),
type_check_mode: TypeCheckMode::Local,
..Flags::default()

View file

@ -17,6 +17,14 @@ pub struct OidcConfig {
pub token: String,
}
/// Whether we are running inside GitHub Actions (i.e. `GITHUB_ACTIONS=true`).
pub(crate) fn is_gha() -> bool {
  matches!(std::env::var("GITHUB_ACTIONS"), Ok(v) if v == "true")
}
/// The GitHub Actions OIDC exchange token, present only when the workflow
/// was granted the `id-token` permission.
pub(crate) fn gha_oidc_token() -> Option<String> {
  match std::env::var("ACTIONS_ID_TOKEN_REQUEST_TOKEN") {
    Ok(token) => Some(token),
    Err(_) => None,
  }
}
fn get_gh_oidc_env_vars() -> Option<Result<(String, String), AnyError>> {
if std::env::var("GITHUB_ACTIONS").unwrap_or_default() == "true" {
let url = std::env::var("ACTIONS_ID_TOKEN_REQUEST_URL");

View file

@ -15,11 +15,13 @@ use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::serde_json;
use deno_core::serde_json::json;
use deno_core::serde_json::Value;
use deno_core::unsync::JoinSet;
use deno_runtime::deno_fetch::reqwest;
use deno_terminal::colors;
use import_map::ImportMap;
use lsp_types::Url;
use serde::Deserialize;
use serde::Serialize;
use sha2::Digest;
@ -48,6 +50,7 @@ mod auth;
mod diagnostics;
mod graph;
mod paths;
mod provenance;
mod publish_order;
mod tar;
mod unfurl;
@ -73,6 +76,7 @@ struct PreparedPublishPackage {
version: String,
tarball: PublishableTarball,
config: String,
exports: HashMap<String, String>,
}
impl PreparedPublishPackage {
@ -161,6 +165,18 @@ async fn prepare_publish(
package: name_no_scope.to_string(),
version: version.to_string(),
tarball,
exports: match &deno_json.json.exports {
Some(Value::Object(exports)) => exports
.into_iter()
.map(|(k, v)| (k.to_string(), v.as_str().unwrap().to_string()))
.collect(),
Some(Value::String(exports)) => {
let mut map = HashMap::new();
map.insert(".".to_string(), exports.to_string());
map
}
_ => HashMap::new(),
},
// the config file is always at the root of a publishing dir,
// so getting the file name is always correct
config: config_path
@ -454,6 +470,7 @@ async fn perform_publish(
mut publish_order_graph: PublishOrderGraph,
mut prepared_package_by_name: HashMap<String, Rc<PreparedPublishPackage>>,
auth_method: AuthMethod,
provenance: bool,
) -> Result<(), AnyError> {
let client = http_client.client()?;
let registry_api_url = jsr_api_url().to_string();
@ -514,6 +531,7 @@ async fn perform_publish(
&registry_api_url,
&registry_url,
&authorization,
provenance,
)
.await
.with_context(|| format!("Failed to publish {}", display_name))?;
@ -540,6 +558,7 @@ async fn publish_package(
registry_api_url: &str,
registry_url: &str,
authorization: &str,
provenance: bool,
) -> Result<(), AnyError> {
let client = http_client.client()?;
println!(
@ -645,6 +664,52 @@ async fn publish_package(
package.package,
package.version
);
if provenance {
// Get the version manifest from JSR
let meta_url = jsr_url().join(&format!(
"@{}/{}/{}_meta.json",
package.scope, package.package, package.version
))?;
let meta_bytes = client.get(meta_url).send().await?.bytes().await?;
if std::env::var("DISABLE_JSR_MANIFEST_VERIFICATION_FOR_TESTING").is_err() {
verify_version_manifest(&meta_bytes, &package)?;
}
let subject = provenance::Subject {
name: format!(
"pkg:jsr/@{}/{}@{}",
package.scope, package.package, package.version
),
digest: provenance::SubjectDigest {
sha256: hex::encode(sha2::Sha256::digest(&meta_bytes)),
},
};
let bundle = provenance::generate_provenance(subject).await?;
let tlog_entry = &bundle.verification_material.tlog_entries[0];
println!("{}",
colors::green(format!(
"Provenance transparency log available at https://search.sigstore.dev/?logIndex={}",
tlog_entry.log_index
))
);
// Submit bundle to JSR
let provenance_url = format!(
"{}scopes/{}/packages/{}/versions/{}/provenance",
registry_api_url, package.scope, package.package, package.version
);
client
.post(provenance_url)
.header(reqwest::header::AUTHORIZATION, authorization)
.json(&json!({ "bundle": bundle }))
.send()
.await?;
}
println!(
"{}",
colors::gray(format!(
@ -826,13 +891,12 @@ pub async fn publish(
Arc::new(ImportMap::new(Url::parse("file:///dev/null").unwrap()))
});
let directory_path = cli_factory.cli_options().initial_cwd();
let mapped_resolver = Arc::new(MappedSpecifierResolver::new(
Some(import_map),
cli_factory.package_json_deps_provider().clone(),
));
let directory_path = cli_factory.cli_options().initial_cwd();
let cli_options = cli_factory.cli_options();
let Some(config_file) = cli_options.maybe_config_file() else {
bail!(
@ -878,6 +942,181 @@ pub async fn publish(
prepared_data.publish_order_graph,
prepared_data.package_by_name,
auth_method,
publish_flags.provenance,
)
.await
.await?;
Ok(())
}
// A single file entry in the registry's version manifest.
#[derive(Deserialize)]
struct ManifestEntry {
  // Checksum string for the file; compared against the tarball file's
  // `hash` field (which is formatted as "sha256-<hex>" at creation time).
  checksum: String,
}
// The `<version>_meta.json` document served by the registry: every published
// file keyed by path, plus the resolved export map.
#[derive(Deserialize)]
struct VersionManifest {
  manifest: HashMap<String, ManifestEntry>,
  exports: HashMap<String, String>,
}
/// Check that the version manifest the registry serves matches the tarball we
/// uploaded: same file count, same per-file checksums, and matching export
/// mappings. Errors on any divergence.
fn verify_version_manifest(
  meta_bytes: &[u8],
  package: &PreparedPublishPackage,
) -> Result<(), AnyError> {
  let manifest = serde_json::from_slice::<VersionManifest>(meta_bytes)?;

  // A differing entry count means files were added or removed server-side.
  let expected_len = package.tarball.files.len();
  if manifest.manifest.len() != expected_len {
    bail!(
      "Mismatch in the number of files in the manifest: expected {}, got {}",
      expected_len,
      manifest.manifest.len()
    );
  }

  // Every manifest path must exist in the tarball with an identical checksum.
  for (path, entry) in manifest.manifest {
    let found = package
      .tarball
      .files
      .iter()
      .find(|f| f.path_str == path.as_str());
    match found {
      Some(file) if file.hash == entry.checksum => {}
      Some(file) => bail!(
        "Checksum mismatch for {}: expected {}, got {}",
        path,
        entry.checksum,
        file.hash
      ),
      None => bail!("File {} not found in the tarball", path),
    }
  }

  // Every export the registry resolved must agree with what we published.
  for (specifier, expected) in &manifest.exports {
    match package.exports.get(specifier) {
      None => bail!("Export {} not found in the package", specifier),
      Some(actual) if actual != expected => bail!(
        "Export {} mismatch: expected {}, got {}",
        specifier,
        expected,
        actual
      ),
      Some(_) => {}
    }
  }
  Ok(())
}
#[cfg(test)]
mod tests {
  use super::tar::PublishableTarball;
  use super::tar::PublishableTarballFile;
  use super::verify_version_manifest;
  use std::collections::HashMap;

  /// Shared fixture: a package with a single `mod.ts` whose hash is
  /// `abc123` and no exports.
  fn test_package() -> super::PreparedPublishPackage {
    super::PreparedPublishPackage {
      scope: "test".to_string(),
      package: "test".to_string(),
      version: "1.0.0".to_string(),
      tarball: PublishableTarball {
        bytes: vec![].into(),
        hash: "abc123".to_string(),
        files: vec![PublishableTarballFile {
          specifier: "file://mod.ts".try_into().unwrap(),
          path_str: "mod.ts".to_string(),
          hash: "abc123".to_string(),
          size: 0,
        }],
      },
      config: "deno.json".to_string(),
      exports: HashMap::new(),
    }
  }

  #[test]
  fn test_verify_version_manifest() {
    // Well-formed manifest that exactly matches the tarball.
    let meta = r#"{
      "manifest": {
        "mod.ts": {
          "checksum": "abc123"
        }
      },
      "exports": {}
    }"#;
    assert!(verify_version_manifest(meta.as_bytes(), &test_package()).is_ok());
  }

  #[test]
  fn test_verify_version_manifest_missing() {
    // `mod.ts` lacks the required `checksum` field, so deserializing the
    // manifest fails and verification must error. (The previous fixture also
    // had an invalid trailing comma, which masked the intent.)
    let meta = r#"{
      "manifest": {
        "mod.ts": {}
      },
      "exports": {}
    }"#;
    assert!(verify_version_manifest(meta.as_bytes(), &test_package()).is_err());
  }

  #[test]
  fn test_verify_version_manifest_invalid_hash() {
    // The previous fixture nested `exports` inside `manifest`, so this test
    // exercised a deserialization failure rather than the checksum check.
    // With well-formed JSON, verification reaches the hash comparison:
    // the manifest says `lol123` but the tarball file hash is `abc123`.
    let meta = r#"{
      "manifest": {
        "mod.ts": {
          "checksum": "lol123"
        }
      },
      "exports": {}
    }"#;
    assert!(verify_version_manifest(meta.as_bytes(), &test_package()).is_err());
  }
}

View file

@ -0,0 +1,725 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use super::api::OidcTokenResponse;
use super::auth::gha_oidc_token;
use super::auth::is_gha;
use base64::engine::general_purpose::STANDARD_NO_PAD;
use base64::prelude::BASE64_STANDARD;
use base64::Engine as _;
use deno_core::anyhow;
use deno_core::anyhow::bail;
use deno_core::error::AnyError;
use deno_core::serde_json;
use once_cell::sync::Lazy;
use p256::elliptic_curve;
use p256::pkcs8::AssociatedOid;
use reqwest::Client;
use ring::rand::SystemRandom;
use ring::signature::EcdsaKeyPair;
use ring::signature::KeyPair;
use serde::Deserialize;
use serde::Serialize;
use sha2::Digest;
use spki::der::asn1;
use spki::der::pem::LineEnding;
use spki::der::EncodePem;
use std::collections::HashMap;
use std::env;
const PAE_PREFIX: &str = "DSSEv1";

/// DSSE Pre-Auth Encoding
///
/// Produces `"DSSEv1 <type-len> <type> <payload-len> <payload>"` as bytes.
///
/// https://github.com/secure-systems-lab/dsse/blob/master/protocol.md#signature-definition
fn pre_auth_encoding(payload_type: &str, payload: &str) -> Vec<u8> {
  let mut out = String::from(PAE_PREFIX);
  out.push(' ');
  out.push_str(&payload_type.len().to_string());
  out.push(' ');
  out.push_str(payload_type);
  out.push(' ');
  out.push_str(&payload.len().to_string());
  out.push(' ');
  out.push_str(payload);
  out.into_bytes()
}
// One DSSE signature. `keyid` stays empty here because the verification
// material (the X.509 certificate) is shipped alongside in the bundle.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Signature {
  keyid: &'static str,
  sig: String,
}
// DSSE envelope: base64-encoded payload, its MIME type, and the signatures
// over the pre-auth encoding of both.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Envelope {
  payload_type: String,
  payload: String,
  signatures: Vec<Signature>,
}
// Envelope wrapper in the shape JSR expects; `$case` is a tagged-union
// discriminator (set to "dsseSignature" in `attest`).
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SignatureBundle {
  #[serde(rename = "$case")]
  case: &'static str,
  dsse_envelope: Envelope,
}
// Hex-encoded SHA-256 digest of the attested artifact.
#[derive(Serialize)]
pub struct SubjectDigest {
  pub sha256: String,
}
// in-toto statement subject: the package identifier plus its digest.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Subject {
  pub name: String,
  pub digest: SubjectDigest,
}
// Digest of the resolved source dependency (the git commit being built).
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct GhaResourceDigest {
  git_commit: String,
}
// GitHub-specific parameters recorded under `internalParameters.github`.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct GithubInternalParameters {
  event_name: String,
  repository_id: String,
  repository_owner_id: String,
}
// A SLSA resource descriptor: a URI plus an optional digest.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct ResourceDescriptor {
  uri: String,
  digest: Option<GhaResourceDigest>,
}
#[derive(Serialize)]
struct InternalParameters {
  github: GithubInternalParameters,
}
// The workflow that performed the build: its git ref, repository URL and
// path within the repository.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct GhaWorkflow {
  // Serialized as `ref`; `ref` is a Rust keyword, hence the rename.
  #[serde(rename = "ref")]
  ref_: String,
  repository: String,
  path: String,
}
#[derive(Serialize)]
struct ExternalParameters {
  workflow: GhaWorkflow,
}
// SLSA v1 `buildDefinition` section.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct BuildDefinition {
  build_type: &'static str,
  resolved_dependencies: [ResourceDescriptor; 1],
  internal_parameters: InternalParameters,
  external_parameters: ExternalParameters,
}
// Identifier of the builder (GitHub runner) that ran the workflow.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Builder {
  id: String,
}
// Invocation metadata: a URL pointing at the concrete workflow run attempt.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Metadata {
  invocation_id: String,
}
// SLSA v1 `runDetails` section.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct RunDetails {
  builder: Builder,
  metadata: Metadata,
}
// The SLSA provenance predicate: how, where and from what the package
// was built.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Predicate {
  build_definition: BuildDefinition,
  run_details: RunDetails,
}
impl Predicate {
  /// Assemble the SLSA v1 build predicate from the GitHub Actions
  /// environment variables.
  ///
  /// # Panics
  ///
  /// Panics with a descriptive message when a required `GITHUB_*` /
  /// `RUNNER_*` variable is missing, or when `GITHUB_WORKFLOW_REF` does not
  /// contain an `@<ref>` suffix. Callers are expected to have confirmed the
  /// GitHub Actions environment (see `is_gha()`) beforehand — previously
  /// these were bare `unwrap()`s with opaque panic messages.
  pub fn new_github_actions() -> Self {
    let repo =
      std::env::var("GITHUB_REPOSITORY").expect("GITHUB_REPOSITORY not set");
    // GITHUB_WORKFLOW_REF looks like
    // "owner/repo/.github/workflows/ci.yml@refs/tags/v1"; strip the leading
    // "owner/repo/" so only the workflow path and ref remain.
    let rel_ref = std::env::var("GITHUB_WORKFLOW_REF")
      .unwrap_or_default()
      .replace(&format!("{}/", &repo), "");
    let delim = rel_ref
      .find('@')
      .expect("GITHUB_WORKFLOW_REF does not contain an '@<ref>' suffix");
    let (workflow_path, mut workflow_ref) = rel_ref.split_at(delim);
    // Drop the leading '@' left on the ref half of the split.
    workflow_ref = &workflow_ref[1..];
    let server_url =
      std::env::var("GITHUB_SERVER_URL").expect("GITHUB_SERVER_URL not set");
    Self {
      build_definition: BuildDefinition {
        build_type: GITHUB_BUILD_TYPE,
        external_parameters: ExternalParameters {
          workflow: GhaWorkflow {
            ref_: workflow_ref.to_string(),
            repository: format!("{}/{}", server_url, &repo),
            path: workflow_path.to_string(),
          },
        },
        internal_parameters: InternalParameters {
          github: GithubInternalParameters {
            event_name: std::env::var("GITHUB_EVENT_NAME").unwrap_or_default(),
            repository_id: std::env::var("GITHUB_REPOSITORY_ID")
              .unwrap_or_default(),
            repository_owner_id: std::env::var("GITHUB_REPOSITORY_OWNER_ID")
              .unwrap_or_default(),
          },
        },
        // The built source: the repo at the exact ref/commit of this run.
        resolved_dependencies: [ResourceDescriptor {
          uri: format!(
            "git+{}/{}@{}",
            server_url,
            &repo,
            std::env::var("GITHUB_REF").expect("GITHUB_REF not set")
          ),
          digest: Some(GhaResourceDigest {
            git_commit: std::env::var("GITHUB_SHA")
              .expect("GITHUB_SHA not set"),
          }),
        }],
      },
      run_details: RunDetails {
        builder: Builder {
          id: format!(
            "{}/{}",
            &GITHUB_BUILDER_ID_PREFIX,
            std::env::var("RUNNER_ENVIRONMENT")
              .expect("RUNNER_ENVIRONMENT not set")
          ),
        },
        metadata: Metadata {
          // URL of the concrete workflow run attempt that produced the build.
          invocation_id: format!(
            "{}/{}/actions/runs/{}/attempts/{}",
            server_url,
            repo,
            std::env::var("GITHUB_RUN_ID").expect("GITHUB_RUN_ID not set"),
            std::env::var("GITHUB_RUN_ATTEMPT")
              .expect("GITHUB_RUN_ATTEMPT not set")
          ),
        },
      },
    }
  }
}
// An in-toto v1 statement binding the subject (the published package) to
// the SLSA provenance predicate.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct ProvenanceAttestation {
  // Serialized as `type`; `type` is a Rust keyword, hence the rename.
  #[serde(rename = "type")]
  _type: &'static str,
  subject: Subject,
  predicate_type: &'static str,
  predicate: Predicate,
}
impl ProvenanceAttestation {
  // Build the statement for `subject` from the GitHub Actions environment.
  pub fn new_github_actions(subject: Subject) -> Self {
    Self {
      _type: INTOTO_STATEMENT_TYPE,
      subject,
      predicate_type: SLSA_PREDICATE_TYPE,
      predicate: Predicate::new_github_actions(),
    }
  }
}
// Spec identifiers for the in-toto statement and SLSA v1 predicate.
const INTOTO_STATEMENT_TYPE: &str = "https://in-toto.io/Statement/v1";
const SLSA_PREDICATE_TYPE: &str = "https://slsa.dev/provenance/v1";
// DSSE payload MIME type used both for signing and for the bundle media type.
const INTOTO_PAYLOAD_TYPE: &str = "application/vnd.in-toto+json";
const GITHUB_BUILDER_ID_PREFIX: &str = "https://github.com/actions/runner";
const GITHUB_BUILD_TYPE: &str =
  "https://slsa-framework.github.io/github-actions-buildtypes/workflow/v1";
// A single certificate, base64/PEM content carried as a string.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct X509Certificate {
  pub raw_bytes: String,
}
// Certificate chain; only the leaf (signing) certificate is included here.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct X509CertificateChain {
  pub certificates: [X509Certificate; 1],
}
// Verification material variant; `$case` is the tagged-union discriminator
// (set to "x509CertificateChain" in `attest`).
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct VerificationMaterialContent {
  #[serde(rename = "$case")]
  pub case: &'static str,
  pub x509_certificate_chain: X509CertificateChain,
}
// Reference into the Rekor transparency log.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TlogEntry {
  pub log_index: u64,
}
// Everything a verifier needs: the signing certificate chain plus the
// transparency-log entry that witnessed the signature.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct VerificationMaterial {
  pub content: VerificationMaterialContent,
  pub tlog_entries: [TlogEntry; 1],
}
// The provenance bundle POSTed to JSR: signed DSSE envelope plus the
// verification material.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ProvenanceBundle {
  pub media_type: &'static str,
  pub content: SignatureBundle,
  pub verification_material: VerificationMaterial,
}
/// Build and sign a SLSA provenance attestation for `subject`.
///
/// Only usable inside GitHub Actions with the `id-token` permission granted;
/// bails out anywhere else.
pub async fn generate_provenance(
  subject: Subject,
) -> Result<ProvenanceBundle, AnyError> {
  if !is_gha() {
    bail!("Automatic provenance is only available in GitHub Actions");
  }
  if gha_oidc_token().is_none() {
    bail!(
      "Provenance generation in Github Actions requires 'id-token' permission"
    );
  }
  let statement =
    serde_json::to_string(&ProvenanceAttestation::new_github_actions(subject))?;
  attest(&statement, INTOTO_PAYLOAD_TYPE).await
}
pub async fn attest(
data: &str,
type_: &str,
) -> Result<ProvenanceBundle, AnyError> {
// DSSE Pre-Auth Encoding (PAE) payload
let pae = pre_auth_encoding(type_, data);
let signer = FulcioSigner::new()?;
let (signature, key_material) = signer.sign(&pae).await?;
let content = SignatureBundle {
case: "dsseSignature",
dsse_envelope: Envelope {
payload_type: type_.to_string(),
payload: BASE64_STANDARD.encode(data),
signatures: vec![Signature {
keyid: "",
sig: BASE64_STANDARD.encode(signature.as_ref()),
}],
},
};
let transparency_logs = testify(&content, &key_material.certificate).await?;
// First log entry is the one we're interested in
let (_, log_entry) = transparency_logs.iter().next().unwrap();
let bundle = ProvenanceBundle {
media_type: "application/vnd.in-toto+json",
content,
verification_material: VerificationMaterial {
content: VerificationMaterialContent {
case: "x509CertificateChain",
x509_certificate_chain: X509CertificateChain {
certificates: [X509Certificate {
raw_bytes: key_material.certificate,
}],
},
},
tlog_entries: [TlogEntry {
log_index: log_entry.log_index,
}],
},
};
Ok(bundle)
}
// Fulcio CA endpoint; overridable via FULCIO_URL (used by the test mock).
static DEFAULT_FULCIO_URL: Lazy<String> = Lazy::new(|| {
  env::var("FULCIO_URL")
    .unwrap_or_else(|_| "https://fulcio.sigstore.dev".to_string())
});
// Signs with a freshly generated key pair whose public half gets certified
// by Fulcio for this one publish.
struct FulcioSigner {
  // The ephemeral key pair used to sign.
  ephemeral_signer: EcdsaKeyPair,
  rng: SystemRandom,
  client: Client,
}
// ECDSA P-256 with SHA-256, ASN.1-encoded signatures.
static ALGORITHM: &ring::signature::EcdsaSigningAlgorithm =
  &ring::signature::ECDSA_P256_SHA256_ASN1_SIGNING;
// The signing certificate returned by Fulcio for the ephemeral key.
struct KeyMaterial {
  pub _case: &'static str,
  pub certificate: String,
}
// --- Fulcio CreateSigningCertificate request/response types below. ---
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct PublicKey {
  algorithm: &'static str,
  // PEM-encoded SubjectPublicKeyInfo.
  content: String,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct PublicKeyRequest {
  public_key: PublicKey,
  // Signature over the OIDC subject, proving we hold the private key.
  proof_of_possession: String,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Credentials {
  oidc_identity_token: String,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct CreateSigningCertificateRequest {
  credentials: Credentials,
  public_key_request: PublicKeyRequest,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct CertificateChain {
  certificates: Vec<String>,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct SignedCertificate {
  chain: CertificateChain,
}
// Fulcio returns the chain either with an embedded or a detached SCT.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct SigningCertificateResponse {
  signed_certificate_embedded_sct: Option<SignedCertificate>,
  signed_certificate_detached_sct: Option<SignedCertificate>,
}
impl FulcioSigner {
  /// Generate a fresh ephemeral P-256 key pair for a one-shot signing
  /// session.
  pub fn new() -> Result<Self, AnyError> {
    let rng = SystemRandom::new();
    let document = EcdsaKeyPair::generate_pkcs8(ALGORITHM, &rng)?;
    let ephemeral_signer =
      EcdsaKeyPair::from_pkcs8(ALGORITHM, document.as_ref(), &rng)?;
    Ok(Self {
      ephemeral_signer,
      rng,
      client: Client::new(),
    })
  }
  /// Sign `data` with the ephemeral key and return the signature together
  /// with a Fulcio-issued certificate binding the key to this workflow's
  /// OIDC identity. Consumes `self`: the key is single-use by design.
  pub async fn sign(
    self,
    data: &[u8],
  ) -> Result<(ring::signature::Signature, KeyMaterial), AnyError> {
    // Request token from GitHub Actions for audience "sigstore"
    let token = gha_request_token("sigstore").await?;
    // Extract the subject from the token
    let subject = extract_jwt_subject(&token)?;
    // Sign the subject to create a challenge
    let challenge =
      self.ephemeral_signer.sign(&self.rng, subject.as_bytes())?;
    // Encode the public key as a PEM SubjectPublicKeyInfo for Fulcio.
    let subject_public_key = self.ephemeral_signer.public_key().as_ref();
    let algorithm = spki::AlgorithmIdentifier {
      oid: elliptic_curve::ALGORITHM_OID,
      parameters: Some((&p256::NistP256::OID).into()),
    };
    let spki = spki::SubjectPublicKeyInfoRef {
      algorithm,
      subject_public_key: asn1::BitStringRef::from_bytes(subject_public_key)?,
    };
    let pem = spki.to_pem(LineEnding::LF)?;
    // Create signing certificate
    let certificates = self
      .create_signing_certificate(&token, pem, challenge)
      .await?;
    let signature = self.ephemeral_signer.sign(&self.rng, data)?;
    Ok((
      signature,
      KeyMaterial {
        _case: "x509Certificate",
        // Leaf certificate of the returned chain.
        certificate: certificates[0].clone(),
      },
    ))
  }
  /// POST the OIDC token, public key and proof-of-possession challenge to
  /// Fulcio's `signingCert` endpoint; returns the issued certificate chain.
  async fn create_signing_certificate(
    &self,
    token: &str,
    public_key: String,
    challenge: ring::signature::Signature,
  ) -> Result<Vec<String>, AnyError> {
    let url = format!("{}/api/v2/signingCert", *DEFAULT_FULCIO_URL);
    let request_body = CreateSigningCertificateRequest {
      credentials: Credentials {
        oidc_identity_token: token.to_string(),
      },
      public_key_request: PublicKeyRequest {
        public_key: PublicKey {
          algorithm: "ECDSA",
          content: public_key,
        },
        proof_of_possession: BASE64_STANDARD.encode(challenge.as_ref()),
      },
    };
    let response = self.client.post(url).json(&request_body).send().await?;
    let body: SigningCertificateResponse = response.json().await?;
    // Accept either SCT embedding variant.
    let key = body
      .signed_certificate_embedded_sct
      .or(body.signed_certificate_detached_sct)
      .ok_or_else(|| anyhow::anyhow!("No certificate chain returned"))?;
    Ok(key.chain.certificates)
  }
}
#[derive(Deserialize)]
struct JwtSubject<'a> {
email: Option<String>,
sub: String,
iss: &'a str,
}
fn extract_jwt_subject(token: &str) -> Result<String, AnyError> {
let parts: Vec<&str> = token.split('.').collect();
let payload = parts[1];
let payload = STANDARD_NO_PAD.decode(payload)?;
let subject: JwtSubject = serde_json::from_slice(&payload)?;
match subject.iss {
"https://accounts.google.com" | "https://oauth2.sigstore.dev/auth" => {
Ok(subject.email.unwrap_or(subject.sub))
}
_ => Ok(subject.sub),
}
}
async fn gha_request_token(aud: &str) -> Result<String, AnyError> {
let Ok(req_url) = env::var("ACTIONS_ID_TOKEN_REQUEST_URL") else {
bail!("Not running in GitHub Actions");
};
let Some(token) = gha_oidc_token() else {
bail!("No OIDC token available");
};
let client = Client::new();
let res = client
.get(&req_url)
.bearer_auth(token)
.query(&[("audience", aud)])
.send()
.await?
.json::<OidcTokenResponse>()
.await?;
Ok(res.value)
}
// Rekor transparency log endpoint; overridable via REKOR_URL (test mock).
static DEFAULT_REKOR_URL: Lazy<String> = Lazy::new(|| {
  env::var("REKOR_URL")
    .unwrap_or_else(|_| "https://rekor.sigstore.dev".to_string())
});
// One transparency-log entry as returned by Rekor.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LogEntry {
  // Rekor serializes this field as `logID` (not `logId`), hence the rename.
  #[serde(rename = "logID")]
  pub log_id: String,
  pub log_index: u64,
}
// Rekor responds with a map of entry UUID -> log entry.
type RekorEntry = HashMap<String, LogEntry>;
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct RekorSignature {
  sig: String,
  // `publicKey` is not the standard part of
  // DSSE, but it's required by Rekor.
  public_key: String,
}
// DSSE envelope in the shape Rekor's intoto entry expects.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct DsseEnvelope {
  payload: String,
  payload_type: String,
  signatures: [RekorSignature; 1],
}
// Body of the `POST /api/v1/log/entries` request ("intoto" entry kind).
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct ProposedIntotoEntry {
  api_version: &'static str,
  kind: &'static str,
  spec: ProposedIntotoEntrySpec,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct ProposedIntotoEntrySpec {
  content: ProposedIntotoEntryContent,
}
// The envelope plus the two digests Rekor requires over it.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct ProposedIntotoEntryContent {
  envelope: DsseEnvelope,
  hash: ProposedIntotoEntryHash,
  payload_hash: ProposedIntotoEntryHash,
}
// An (algorithm, hex digest) pair.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct ProposedIntotoEntryHash {
  algorithm: &'static str,
  value: String,
}
// Rekor witness
//
// Submits the signed DSSE envelope to the Rekor transparency log as a
// proposed "intoto" entry and returns Rekor's response (a map of entry
// UUID -> log entry).
async fn testify(
  content: &SignatureBundle,
  public_key: &str,
) -> Result<RekorEntry, AnyError> {
  // Rekor "intoto" entry for the given DSSE envelope and signature.
  //
  // Calculate the value for the payloadHash field into the Rekor entry
  let payload_hash = hex::encode(sha2::Sha256::digest(
    content.dsse_envelope.payload.as_bytes(),
  ));
  // Calculate the value for the hash field into the Rekor entry: SHA-256 of
  // the canonical JSON serialization of the (single-encoded) envelope.
  let envelope_hash = hex::encode({
    let dsse = DsseEnvelope {
      payload: content.dsse_envelope.payload.clone(),
      payload_type: content.dsse_envelope.payload_type.clone(),
      signatures: [RekorSignature {
        sig: content.dsse_envelope.signatures[0].sig.clone(),
        public_key: public_key.to_string(),
      }],
    };
    sha2::Sha256::digest(serde_json::to_string(&dsse)?.as_bytes())
  });
  // Re-create the DSSE envelope. `publicKey` is not the standard part of
  // DSSE, but it's required by Rekor.
  //
  // Double-encode payload and signature because that's what Rekor expects
  let dsse = DsseEnvelope {
    payload_type: content.dsse_envelope.payload_type.clone(),
    payload: BASE64_STANDARD.encode(content.dsse_envelope.payload.clone()),
    signatures: [RekorSignature {
      sig: BASE64_STANDARD
        .encode(content.dsse_envelope.signatures[0].sig.clone()),
      public_key: BASE64_STANDARD.encode(public_key),
    }],
  };
  let proposed_intoto_entry = ProposedIntotoEntry {
    api_version: "0.0.2",
    kind: "intoto",
    spec: ProposedIntotoEntrySpec {
      content: ProposedIntotoEntryContent {
        envelope: dsse,
        hash: ProposedIntotoEntryHash {
          algorithm: "sha256",
          value: envelope_hash,
        },
        payload_hash: ProposedIntotoEntryHash {
          algorithm: "sha256",
          value: payload_hash,
        },
      },
    },
  };
  let client = Client::new();
  let url = format!("{}/api/v1/log/entries", *DEFAULT_REKOR_URL);
  let res = client
    .post(&url)
    .json(&proposed_intoto_entry)
    .send()
    .await?;
  let body: RekorEntry = res.json().await?;
  Ok(body)
}
#[cfg(test)]
mod tests {
  use super::ProvenanceAttestation;
  use super::Subject;
  use super::SubjectDigest;
  use std::env;
  #[test]
  fn slsa_github_actions() {
    // Set environment variable
    // Fake a GitHub Actions environment when not already inside one, so
    // `Predicate::new_github_actions()` can read the variables it needs.
    if env::var("GITHUB_ACTIONS").is_err() {
      env::set_var("CI", "true");
      env::set_var("GITHUB_ACTIONS", "true");
      env::set_var("ACTIONS_ID_TOKEN_REQUEST_URL", "https://example.com");
      env::set_var("ACTIONS_ID_TOKEN_REQUEST_TOKEN", "dummy");
      env::set_var("GITHUB_REPOSITORY", "littledivy/deno_sdl2");
      env::set_var("GITHUB_SERVER_URL", "https://github.com");
      env::set_var("GITHUB_REF", "refs/tags/sdl2@0.0.1");
      env::set_var("GITHUB_SHA", "lol");
      env::set_var("GITHUB_RUN_ID", "1");
      env::set_var("GITHUB_RUN_ATTEMPT", "1");
      env::set_var("RUNNER_ENVIRONMENT", "github-hosted");
      env::set_var(
        "GITHUB_WORKFLOW_REF",
        "littledivy/deno_sdl2@refs/tags/sdl2@0.0.1",
      );
    }
    // Building the attestation must not panic and must carry the subject
    // through unchanged.
    let subject = Subject {
      name: "jsr:@divy/sdl2@0.0.1".to_string(),
      digest: SubjectDigest {
        sha256: "yourmom".to_string(),
      },
    };
    let slsa = ProvenanceAttestation::new_github_actions(subject);
    assert_eq!(slsa.subject.name, "jsr:@divy/sdl2@0.0.1");
    assert_eq!(slsa.subject.digest.sha256, "yourmom");
  }
}

View file

@ -25,7 +25,9 @@ use super::unfurl::SpecifierUnfurler;
#[derive(Debug, Clone, PartialEq)]
pub struct PublishableTarballFile {
pub path_str: String,
pub specifier: Url,
pub hash: String,
pub size: usize,
}
@ -153,7 +155,10 @@ pub fn create_gzipped_tarball(
diagnostics_collector,
)?;
files.push(PublishableTarballFile {
path_str: path_str.clone(),
specifier: specifier.clone(),
// This hash string matches the checksum computed by registry
hash: format!("sha256-{:x}", sha2::Sha256::digest(&content)),
size: content.len(),
});
tar

View file

@ -4,6 +4,7 @@ use deno_core::serde_json::json;
use test_util::assert_contains;
use test_util::assert_not_contains;
use test_util::env_vars_for_jsr_npm_tests;
use test_util::env_vars_for_jsr_provenance_tests;
use test_util::env_vars_for_jsr_tests;
use test_util::env_vars_for_npm_tests;
use test_util::itest;
@ -164,6 +165,14 @@ itest!(successful {
http_server: true,
});
itest!(provenance {
args: "publish --provenance",
output: "publish/successful_provenance.out",
cwd: Some("publish/successful"),
envs: env_vars_for_jsr_provenance_tests(),
http_server: true,
});
itest!(no_check {
args: "publish --token 'sadfasdf' --no-check",
// still type checks the slow types output though

View file

@ -0,0 +1,7 @@
Check file:///[WILDCARD]/publish/successful/mod.ts
Checking for slow types in the public API...
Check file:///[WILDCARD]/publish/successful/mod.ts
Publishing @foo/bar@1.0.0 ...
Successfully published @foo/bar@1.0.0
Provenance transparency log available at https://search.sigstore.dev/?logIndex=42069
Visit http://127.0.0.1:4250/@foo/bar@1.0.0 for details

View file

@ -64,6 +64,50 @@ pub fn env_vars_for_jsr_tests() -> Vec<(String, String)> {
]
}
/// Environment for `deno publish --provenance` integration tests: the usual
/// JSR test env plus mock Sigstore endpoints and a faked GitHub Actions
/// environment for attestation.
pub fn env_vars_for_jsr_provenance_tests() -> Vec<(String, String)> {
  let mut envs = env_vars_for_jsr_tests();
  envs.extend([
    // Point Rekor/Fulcio at the local mock server; the mock registry does
    // not serve real manifests, so skip manifest verification.
    ("REKOR_URL".to_string(), rekor_url()),
    ("FULCIO_URL".to_string(), fulcio_url()),
    (
      "DISABLE_JSR_MANIFEST_VERIFICATION_FOR_TESTING".to_string(),
      "true".to_string(),
    ),
    // set GHA variable for attestation.
    ("CI".to_string(), "true".to_string()),
    ("GITHUB_ACTIONS".to_string(), "true".to_string()),
    ("ACTIONS_ID_TOKEN_REQUEST_URL".to_string(), gha_token_url()),
    (
      "ACTIONS_ID_TOKEN_REQUEST_TOKEN".to_string(),
      "dummy".to_string(),
    ),
    (
      "GITHUB_REPOSITORY".to_string(),
      "littledivy/deno_sdl2".to_string(),
    ),
    (
      "GITHUB_SERVER_URL".to_string(),
      "https://github.com".to_string(),
    ),
    ("GITHUB_REF".to_string(), "refs/tags/sdl2@0.0.1".to_string()),
    ("GITHUB_SHA".to_string(), "lol".to_string()),
    ("GITHUB_RUN_ID".to_string(), "1".to_string()),
    ("GITHUB_RUN_ATTEMPT".to_string(), "1".to_string()),
    (
      "RUNNER_ENVIRONMENT".to_string(),
      "github-hosted".to_string(),
    ),
    (
      "GITHUB_WORKFLOW_REF".to_string(),
      "littledivy/deno_sdl2@refs/tags/sdl2@0.0.1".to_string(),
    ),
  ]);
  envs
}
pub fn env_vars_for_jsr_npm_tests() -> Vec<(String, String)> {
vec![
("NPM_CONFIG_REGISTRY".to_string(), npm_registry_url()),
@ -125,6 +169,18 @@ pub fn jsr_registry_url() -> String {
"http://127.0.0.1:4250/".to_string()
}
/// Base URL of the mock Rekor transparency log used in tests.
pub fn rekor_url() -> String {
  String::from("http://127.0.0.1:4251")
}
/// Base URL of the mock Fulcio CA used in tests (shares a port with Rekor).
pub fn fulcio_url() -> String {
  String::from("http://127.0.0.1:4251")
}
/// Mock GitHub Actions OIDC token endpoint used in provenance tests.
pub fn gha_token_url() -> String {
  String::from("http://127.0.0.1:4251/gha_oidc?test=true")
}
/// Deliberately unresolvable registry URL, for tests that must not hit JSR.
pub fn jsr_registry_unset_url() -> String {
  String::from("http://JSR_URL.is.unset")
}

View file

@ -84,6 +84,7 @@ const WS_PING_PORT: u16 = 4245;
const H2_GRPC_PORT: u16 = 4246;
const H2S_GRPC_PORT: u16 = 4247;
const REGISTRY_SERVER_PORT: u16 = 4250;
const PROVENANCE_MOCK_SERVER_PORT: u16 = 4251;
// Use the single-threaded scheduler. The hyper server is used as a point of
// comparison for the (single-threaded!) benchmarks in cli/bench. We're not
@ -127,6 +128,8 @@ pub async fn run_all_servers() {
let h2_grpc_server_fut = grpc::h2_grpc_server(H2_GRPC_PORT, H2S_GRPC_PORT);
let registry_server_fut = registry::registry_server(REGISTRY_SERVER_PORT);
let provenance_mock_server_fut =
registry::provenance_mock_server(PROVENANCE_MOCK_SERVER_PORT);
let server_fut = async {
futures::join!(
@ -154,6 +157,7 @@ pub async fn run_all_servers() {
h2_only_server_fut,
h2_grpc_server_fut,
registry_server_fut,
provenance_mock_server_fut,
)
}
.boxed_local();

View file

@ -5,6 +5,8 @@ use crate::testdata_path;
use super::run_server;
use super::ServerKind;
use super::ServerOptions;
use base64::engine::general_purpose::STANDARD_NO_PAD;
use base64::Engine as _;
use bytes::Bytes;
use http_body_util::combinators::UnsyncBoxBody;
use http_body_util::Empty;
@ -36,6 +38,77 @@ pub async fn registry_server(port: u16) {
.await
}
/// Run the mock Sigstore server (GitHub OIDC + Fulcio + Rekor endpoints)
/// used by `deno publish --provenance` integration tests.
pub async fn provenance_mock_server(port: u16) {
  let options = ServerOptions {
    addr: SocketAddr::from(([127, 0, 0, 1], port)),
    error_msg: "Provenance mock server error",
    kind: ServerKind::Auto,
  };
  run_server(options, provenance_mock_server_handler).await
}
async fn provenance_mock_server_handler(
req: Request<Incoming>,
) -> Result<Response<UnsyncBoxBody<Bytes, Infallible>>, anyhow::Error> {
let path = req.uri().path();
// OIDC request
if path.starts_with("/gha_oidc") {
let jwt_claim = json!({
"sub": "divy",
"email": "divy@deno.com",
"iss": "https://github.com",
});
let token = format!(
"AAA.{}.",
STANDARD_NO_PAD.encode(serde_json::to_string(&jwt_claim).unwrap())
);
let body = serde_json::to_string_pretty(&json!({
"value": token,
}));
let res = Response::new(UnsyncBoxBody::new(Full::from(body.unwrap())));
return Ok(res);
}
// Fulcio
if path.starts_with("/api/v2/signingCert") {
let body = serde_json::to_string_pretty(&json!({
"signedCertificateEmbeddedSct": {
"chain": {
"certificates": [
"fake_certificate"
]
}
}
}));
let res = Response::new(UnsyncBoxBody::new(Full::from(body.unwrap())));
return Ok(res);
}
// Rekor
if path.starts_with("/api/v1/log/entries") {
let body = serde_json::to_string_pretty(&json!({
"transparency_log_1": {
"logID": "test_log_id",
"logIndex": 42069,
}
}));
let res = Response::new(UnsyncBoxBody::new(Full::from(body.unwrap())));
return Ok(res);
}
let empty_body = UnsyncBoxBody::new(Empty::new());
let res = Response::builder()
.status(StatusCode::NOT_FOUND)
.body(empty_body)?;
Ok(res)
}
async fn registry_server_handler(
req: Request<Incoming>,
) -> Result<Response<UnsyncBoxBody<Bytes, Infallible>>, anyhow::Error> {