diff --git a/Cargo.lock b/Cargo.lock index 46e18da..3fefadf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -20,6 +20,12 @@ dependencies = [ "libc", ] +[[package]] +name = "anyhow" +version = "1.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" + [[package]] name = "async-recursion" version = "1.1.1" @@ -318,6 +324,24 @@ version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "env_home" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7f84e12ccf0a7ddc17a6c41c93326024c42920d7ee630d04950e6926645c0fe" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + [[package]] name = "errno" version = "0.3.14" @@ -340,6 +364,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" +[[package]] +name = "fixedbitset" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" + [[package]] name = "flagset" version = "0.4.7" @@ -506,6 +536,12 @@ dependencies = [ "regex-syntax", ] +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + [[package]] name = "heck" version = "0.5.0" @@ -745,6 +781,16 @@ dependencies = [ "icu_properties", ] +[[package]] +name = "indexmap" +version = "2.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" +dependencies = [ + "equivalent", + "hashbrown", +] + [[package]] name = "ipnet" version = "2.11.0" @@ -761,6 +807,15 @@ dependencies = [ "serde", ] +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.15" @@ -853,6 +908,12 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "multimap" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084" + [[package]] name = "num-traits" version = "0.2.19" @@ -885,6 +946,15 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" +[[package]] +name = "ordered-float" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" +dependencies = [ + "num-traits", +] + [[package]] name = "parking_lot" version = "0.12.5" @@ -933,6 +1003,16 @@ version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" +[[package]] +name = "petgraph" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" +dependencies = [ + "fixedbitset", + "indexmap", +] + [[package]] name = "pin-project" version = "1.1.10" @@ -983,6 +1063,16 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + [[package]] name = "proc-macro2" version = "1.0.103" @@ -992,6 +1082,93 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "prost" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7231bd9b3d3d33c86b58adbac74b5ec0ad9f496b19d22801d773636feaa95f3d" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac6c3320f9abac597dcbc668774ef006702672474aad53c6d596b62e487b40b1" +dependencies = [ + "heck", + "itertools", + "log", + "multimap", + "once_cell", + "petgraph", + "prettyplease", + "prost", + "prost-types", + "regex", + "syn", + "tempfile", +] + +[[package]] +name = "prost-derive" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9120690fafc389a67ba3803df527d0ec9cbbc9cc45e4cc20b332996dfb672425" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "prost-reflect" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b89455ef41ed200cafc47c76c552ee7792370ac420497e551f16123a9135f76e" +dependencies = [ + "base64", + "prost", + "prost-reflect-derive", + "prost-types", + "serde", + "serde-value", +] + +[[package]] +name = "prost-reflect-build" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8214ae2c30bbac390db0134d08300e770ef89b6d4e5abf855e8d300eded87e28" +dependencies = [ + "prost-build", + "prost-reflect", +] + +[[package]] +name = "prost-reflect-derive" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b6d90e29fa6c0d13c2c19ba5e4b3fb0efbf5975d27bcf4e260b7b15455bcabe" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "prost-types" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9b4db3d6da204ed77bb26ba83b6122a73aeb2e87e25fbf7ad2e84c4ccbf8f72" +dependencies = [ + "prost", +] + [[package]] name = "quinn" version = "0.11.9" @@ -1348,6 +1525,16 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-value" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" +dependencies = [ + "ordered-float", + "serde", +] + [[package]] name = "serde_core" version = "1.0.228" @@ -1577,6 +1764,16 @@ dependencies = [ "url", ] +[[package]] +name = "sigstore-protobuf-specs-derive" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80baa401f274093f7bb27d7a69d6139cbc11f1b97624e9a61a9b3ea32c776a35" +dependencies = [ + "quote", + "syn", +] + [[package]] name = "sigstore-rekor" version = 
"0.5.0" @@ -1631,6 +1828,7 @@ dependencies = [ "serde_json", "sigstore-crypto", "sigstore-types", + "sigstore_protobuf_specs", "thiserror", "tokio", "tough", @@ -1675,6 +1873,7 @@ dependencies = [ "pem", "serde", "serde_json", + "sigstore_protobuf_specs", "thiserror", ] @@ -1708,6 +1907,25 @@ dependencies = [ "x509-cert", ] +[[package]] +name = "sigstore_protobuf_specs" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4827a7a6b539af686abf27c09cb3ddc76c786c0b8b999a1b13566747b22e77c" +dependencies = [ + "anyhow", + "glob", + "prost", + "prost-build", + "prost-reflect", + "prost-reflect-build", + "prost-types", + "serde", + "serde_json", + "sigstore-protobuf-specs-derive", + "which", +] + [[package]] name = "slab" version = "0.4.11" @@ -2250,6 +2468,17 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "which" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3fabb953106c3c8eea8306e4393700d7657561cb43122571b172bbfb7c7ba1d" +dependencies = [ + "env_home", + "rustix", + "winsafe", +] + [[package]] name = "winapi-util" version = "0.1.11" @@ -2474,6 +2703,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" +[[package]] +name = "winsafe" +version = "0.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" + [[package]] name = "wit-bindgen" version = "0.46.0" diff --git a/Cargo.toml b/Cargo.toml index 430b1cc..6f7fc0b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -76,6 +76,9 @@ tracing = { version = "0.1" } # Serialization / JSON serde_json_canonicalizer = "0.3" +# Sigstore protobuf specs +sigstore_protobuf_specs = "0.5" + # TUF / Package management tough = { version = "0.21", features = ["http"] } diff --git a/crates/sigstore-bundle/src/builder.rs b/crates/sigstore-bundle/src/builder.rs index 04e54cb..0388eb5 100644 --- a/crates/sigstore-bundle/src/builder.rs +++ b/crates/sigstore-bundle/src/builder.rs @@ -3,12 +3,12 @@ use sigstore_rekor::entry::LogEntry; use sigstore_types::{ bundle::{ - CertificateContent, CheckpointData, InclusionPromise, InclusionProof, KindVersion, LogId, - MessageSignature, Rfc3161Timestamp, SignatureContent, TimestampVerificationData, - TransparencyLogEntry, VerificationMaterial, VerificationMaterialContent, + BundleContent, InclusionPromise, InclusionProof, KindVersion, LogId, MessageSignature, + ProtoCheckpoint, Rfc3161SignedTimestamp, TimestampVerificationData, TransparencyLogEntry, + VerificationMaterial, VerificationMaterialContent, X509Certificate, }, - Bundle, CanonicalizedBody, DerCertificate, DsseEnvelope, LogKeyId, MediaType, Sha256Hash, - SignatureBytes, SignedTimestamp, TimestampToken, + Bundle, DerCertificate, DsseEnvelope, HashOutput, MediaType, ProtoHashAlgorithm, + PublicKeyIdentifier, Sha256Hash, SignatureBytes, SignedTimestamp, }; /// Verification material for v0.3 bundles. @@ -23,6 +23,18 @@ pub enum VerificationMaterialV03 { PublicKey { hint: String }, } +/// Signature content for a bundle +#[derive(Debug, Clone)] +pub enum SignatureContent { + /// A message signature with digest + MessageSignature { + signature: SignatureBytes, + digest: Sha256Hash, + }, + /// A DSSE envelope + DsseEnvelope(DsseEnvelope), +} + /// A Sigstore bundle in v0.3 format. 
/// /// The v0.3 format requires: @@ -47,8 +59,8 @@ pub struct BundleV03 { pub content: SignatureContent, /// Transparency log entries pub tlog_entries: Vec, - /// RFC 3161 timestamps - pub rfc3161_timestamps: Vec, + /// RFC 3161 timestamps (raw DER bytes) + pub rfc3161_timestamps: Vec>, } impl BundleV03 { @@ -72,13 +84,10 @@ impl BundleV03 { ) -> Self { Self::new( VerificationMaterialV03::Certificate(certificate), - SignatureContent::MessageSignature(MessageSignature { - message_digest: Some(sigstore_types::bundle::MessageDigest { - algorithm: sigstore_types::HashAlgorithm::Sha2256, - digest: artifact_digest, - }), + SignatureContent::MessageSignature { signature, - }), + digest: artifact_digest, + }, ) } @@ -99,10 +108,8 @@ impl BundleV03 { } /// Add an RFC 3161 timestamp. - pub fn with_rfc3161_timestamp(mut self, timestamp: TimestampToken) -> Self { - self.rfc3161_timestamps.push(Rfc3161Timestamp { - signed_timestamp: timestamp, - }); + pub fn with_rfc3161_timestamp(mut self, timestamp: Vec) -> Self { + self.rfc3161_timestamps.push(timestamp); self } @@ -110,34 +117,55 @@ impl BundleV03 { pub fn into_bundle(self) -> Bundle { let verification_content = match self.verification { VerificationMaterialV03::Certificate(cert) => { - VerificationMaterialContent::Certificate(CertificateContent { raw_bytes: cert }) + VerificationMaterialContent::Certificate(X509Certificate { + raw_bytes: cert.as_bytes().to_vec(), + }) } VerificationMaterialV03::PublicKey { hint } => { - VerificationMaterialContent::PublicKey { hint } + VerificationMaterialContent::PublicKey(PublicKeyIdentifier { hint }) + } + }; + + let bundle_content = match self.content { + SignatureContent::MessageSignature { signature, digest } => { + BundleContent::MessageSignature(MessageSignature { + message_digest: Some(HashOutput { + algorithm: ProtoHashAlgorithm::Sha2256 as i32, + digest: digest.as_bytes().to_vec(), + }), + signature: signature.as_bytes().to_vec(), + }) } + SignatureContent::DsseEnvelope(envelope) => BundleContent::DsseEnvelope(envelope), }; Bundle { media_type: MediaType::Bundle0_3.as_str().to_string(), - verification_material: VerificationMaterial { - content: verification_content, + verification_material: Some(VerificationMaterial { + content: Some(verification_content), tlog_entries: self.tlog_entries, - timestamp_verification_data: TimestampVerificationData { - rfc3161_timestamps: self.rfc3161_timestamps, - }, - }, - content: self.content, + timestamp_verification_data: Some(TimestampVerificationData { + rfc3161_timestamps: self + .rfc3161_timestamps + .into_iter() + .map(|ts| Rfc3161SignedTimestamp { + signed_timestamp: ts, + }) + .collect(), + }), + }), + content: Some(bundle_content), } } } /// Helper to create a transparency log entry. 
pub struct TlogEntryBuilder { - log_index: u64, - log_id: String, + log_index: i64, + log_id: Vec, kind: String, kind_version: String, - integrated_time: u64, + integrated_time: i64, canonicalized_body: Vec, inclusion_promise: Option, inclusion_proof: Option, @@ -148,7 +176,7 @@ impl TlogEntryBuilder { pub fn new() -> Self { Self { log_index: 0, - log_id: String::new(), + log_id: Vec::new(), kind: "hashedrekord".to_string(), kind_version: "0.0.1".to_string(), integrated_time: 0, @@ -168,18 +196,15 @@ impl TlogEntryBuilder { /// * `kind` - The entry kind (e.g., "hashedrekord", "dsse") /// * `version` - The entry version (e.g., "0.0.1") pub fn from_log_entry(entry: &LogEntry, kind: &str, version: &str) -> Self { - // Convert hex log_id to base64 using the type-safe method - let log_id_base64 = entry - .log_id - .to_base64() - .unwrap_or_else(|_| entry.log_id.to_string()); + // Convert hex log_id to raw bytes + let log_id_bytes = entry.log_id.decode().unwrap_or_default(); let mut builder = Self { - log_index: entry.log_index as u64, - log_id: log_id_base64, + log_index: entry.log_index, + log_id: log_id_bytes, kind: kind.to_string(), kind_version: version.to_string(), - integrated_time: entry.integrated_time as u64, + integrated_time: entry.integrated_time, canonicalized_body: entry.body.as_bytes().to_vec(), inclusion_promise: None, inclusion_proof: None, @@ -189,31 +214,32 @@ impl TlogEntryBuilder { if let Some(verification) = &entry.verification { if let Some(set) = &verification.signed_entry_timestamp { builder.inclusion_promise = Some(InclusionPromise { - signed_entry_timestamp: set.clone(), + signed_entry_timestamp: set.as_bytes().to_vec(), }); } if let Some(proof) = &verification.inclusion_proof { - // Rekor V1 API returns hashes as hex, bundle format expects base64 - // Convert root_hash from hex to Sha256Hash + // Rekor V1 API returns hashes as hex, bundle format expects raw bytes let root_hash = Sha256Hash::from_hex(&proof.root_hash) - .unwrap_or_else(|_| Sha256Hash::from_bytes([0u8; 32])); + .map(|h| h.as_bytes().to_vec()) + .unwrap_or_default(); - // Convert all proof hashes from hex to Sha256Hash - let hashes: Vec = proof + // Convert all proof hashes from hex to raw bytes + let hashes: Vec> = proof .hashes .iter() .filter_map(|h| Sha256Hash::from_hex(h).ok()) + .map(|h| h.as_bytes().to_vec()) .collect(); builder.inclusion_proof = Some(InclusionProof { - log_index: proof.log_index.to_string().into(), + log_index: proof.log_index, root_hash, - tree_size: proof.tree_size.to_string(), + tree_size: proof.tree_size, hashes, - checkpoint: CheckpointData { + checkpoint: Some(ProtoCheckpoint { envelope: proof.checkpoint.clone(), - }, + }), }); } } @@ -222,13 +248,13 @@ impl TlogEntryBuilder { } /// Set the log index. - pub fn log_index(mut self, index: u64) -> Self { + pub fn log_index(mut self, index: i64) -> Self { self.log_index = index; self } /// Set the integrated time (Unix timestamp). - pub fn integrated_time(mut self, time: u64) -> Self { + pub fn integrated_time(mut self, time: i64) -> Self { self.integrated_time = time; self } @@ -236,7 +262,7 @@ impl TlogEntryBuilder { /// Set the inclusion promise (Signed Entry Timestamp). 
pub fn inclusion_promise(mut self, signed_entry_timestamp: SignedTimestamp) -> Self { self.inclusion_promise = Some(InclusionPromise { - signed_entry_timestamp, + signed_entry_timestamp: signed_entry_timestamp.as_bytes().to_vec(), }); self } @@ -251,20 +277,20 @@ impl TlogEntryBuilder { /// * `checkpoint` - The checkpoint envelope pub fn inclusion_proof( mut self, - log_index: u64, + log_index: i64, root_hash: Sha256Hash, - tree_size: u64, + tree_size: i64, hashes: Vec, checkpoint: String, ) -> Self { self.inclusion_proof = Some(InclusionProof { - log_index: log_index.to_string().into(), - root_hash, - tree_size: tree_size.to_string(), - hashes, - checkpoint: CheckpointData { + log_index, + root_hash: root_hash.as_bytes().to_vec(), + tree_size, + hashes: hashes.iter().map(|h| h.as_bytes().to_vec()).collect(), + checkpoint: Some(ProtoCheckpoint { envelope: checkpoint, - }, + }), }); self } @@ -272,24 +298,18 @@ impl TlogEntryBuilder { /// Build the transparency log entry. pub fn build(self) -> TransparencyLogEntry { TransparencyLogEntry { - log_index: self.log_index.to_string().into(), - log_id: LogId { - key_id: LogKeyId::new(self.log_id), - }, - kind_version: KindVersion { + log_index: self.log_index, + log_id: Some(LogId { + key_id: self.log_id, + }), + kind_version: Some(KindVersion { kind: self.kind, version: self.kind_version, - }, - // For V2 entries, integrated_time is 0 and should be omitted from JSON - // (skip_serializing_if = "String::is_empty" handles this) - integrated_time: if self.integrated_time == 0 { - String::new() - } else { - self.integrated_time.to_string() - }, + }), + integrated_time: self.integrated_time, inclusion_promise: self.inclusion_promise, inclusion_proof: self.inclusion_proof, - canonicalized_body: CanonicalizedBody::new(self.canonicalized_body), + canonicalized_body: self.canonicalized_body, } } } diff --git a/crates/sigstore-bundle/src/validation.rs b/crates/sigstore-bundle/src/validation.rs index b96d7e9..e633d2d 100644 --- a/crates/sigstore-bundle/src/validation.rs +++ b/crates/sigstore-bundle/src/validation.rs @@ -4,7 +4,7 @@ use crate::error::{Error, Result}; use sigstore_merkle::verify_inclusion_proof; -use sigstore_types::{Bundle, MediaType, Sha256Hash}; +use sigstore_types::{Bundle, BundleExt, InclusionProofExt, MediaType, Sha256Hash}; /// Validation options #[derive(Debug, Clone)] @@ -76,14 +76,24 @@ fn validate_v0_2(bundle: &Bundle, options: &ValidationOptions) -> Result<()> { /// Validate a v0.3 bundle fn validate_v0_3(bundle: &Bundle, options: &ValidationOptions) -> Result<()> { // v0.3 must have single certificate (not chain) or public key - match &bundle.verification_material.content { - sigstore_types::bundle::VerificationMaterialContent::Certificate(_) => {} - sigstore_types::bundle::VerificationMaterialContent::X509CertificateChain { .. } => { + let vm = bundle + .verification_material + .as_ref() + .ok_or_else(|| Error::Validation("bundle missing verification material".to_string()))?; + + match &vm.content { + Some(sigstore_types::bundle::VerificationMaterialContent::Certificate(_)) => {} + Some(sigstore_types::bundle::VerificationMaterialContent::X509CertificateChain(_)) => { return Err(Error::Validation( "v0.3 bundle must use single certificate, not chain".to_string(), )); } - sigstore_types::bundle::VerificationMaterialContent::PublicKey { .. 
} => {} + Some(sigstore_types::bundle::VerificationMaterialContent::PublicKey(_)) => {} + None => { + return Err(Error::Validation( + "bundle missing verification material content".to_string(), + )); + } } // v0.3 requires inclusion proof @@ -102,24 +112,31 @@ fn validate_v0_3(bundle: &Bundle, options: &ValidationOptions) -> Result<()> { /// Common validation for all bundle versions fn validate_common(bundle: &Bundle, options: &ValidationOptions) -> Result<()> { + let vm = bundle + .verification_material + .as_ref() + .ok_or_else(|| Error::Validation("bundle missing verification material".to_string()))?; + // Must have at least one tlog entry - if bundle.verification_material.tlog_entries.is_empty() { + if vm.tlog_entries.is_empty() { return Err(Error::Validation( "bundle must have at least one tlog entry".to_string(), )); } // Check timestamp if required - if options.require_timestamp - && bundle - .verification_material + if options.require_timestamp { + let has_timestamps = vm .timestamp_verification_data - .rfc3161_timestamps - .is_empty() - { - return Err(Error::Validation( - "bundle must have timestamp verification data".to_string(), - )); + .as_ref() + .map(|tvd| !tvd.rfc3161_timestamps.is_empty()) + .unwrap_or(false); + + if !has_timestamps { + return Err(Error::Validation( + "bundle must have timestamp verification data".to_string(), + )); + } } Ok(()) @@ -127,29 +144,39 @@ fn validate_common(bundle: &Bundle, options: &ValidationOptions) -> Result<()> { /// Validate inclusion proofs in the bundle fn validate_inclusion_proofs(bundle: &Bundle) -> Result<()> { - for entry in &bundle.verification_material.tlog_entries { + let vm = match bundle.verification_material.as_ref() { + Some(vm) => vm, + None => return Ok(()), // No verification material means no proofs to validate + }; + + for entry in &vm.tlog_entries { if let Some(proof) = &entry.inclusion_proof { // Parse the checkpoint to get the expected root let checkpoint = proof - .checkpoint - .parse() + .parse_checkpoint() .map_err(|e| Error::Validation(format!("failed to parse checkpoint: {}", e)))?; // Get the leaf (canonicalized body) bytes - let leaf_data = entry.canonicalized_body.as_bytes(); - - // Get proof hashes (already decoded as Vec) - let proof_hashes: &[Sha256Hash] = &proof.hashes; - - // Parse indices - let leaf_index: u64 = proof - .log_index - .as_u64() - .map_err(|_| Error::Validation("invalid log_index in proof".to_string()))?; - let tree_size: u64 = proof - .tree_size - .parse() - .map_err(|_| Error::Validation("invalid tree_size in proof".to_string()))?; + let leaf_data = &entry.canonicalized_body; + + // Get proof hashes (now Vec>) + let proof_hashes: Vec = proof + .hashes + .iter() + .filter_map(|h| { + if h.len() == 32 { + let mut arr = [0u8; 32]; + arr.copy_from_slice(h); + Some(Sha256Hash::from_bytes(arr)) + } else { + None + } + }) + .collect(); + + // Get indices (now i64) + let leaf_index = proof.log_index_u64(); + let tree_size = proof.tree_size_u64(); // Get expected root from checkpoint (already a Sha256Hash) let expected_root = checkpoint.root_hash; @@ -162,7 +189,7 @@ fn validate_inclusion_proofs(bundle: &Bundle) -> Result<()> { &leaf_hash, leaf_index, tree_size, - proof_hashes, + &proof_hashes, &expected_root, ) .map_err(|e| { diff --git a/crates/sigstore-bundle/tests/bundle_v3_tests.rs b/crates/sigstore-bundle/tests/bundle_v3_tests.rs index c87535a..c1fce91 100644 --- a/crates/sigstore-bundle/tests/bundle_v3_tests.rs +++ b/crates/sigstore-bundle/tests/bundle_v3_tests.rs @@ -3,7 +3,7 @@ //! 
These tests use real bundle fixtures from the sigstore-python project. use sigstore_bundle::{validate_bundle, validate_bundle_with_options, ValidationOptions}; -use sigstore_types::{Bundle, LogIndex, MediaType}; +use sigstore_types::{Bundle, BundleContent, BundleExt, InclusionProofExt, MediaType}; /// Test bundle JSON from sigstore-python/test/assets/bundle_v3.txt.sigstore const BUNDLE_V3_JSON: &str = r#"{ @@ -71,12 +71,14 @@ fn test_parse_v3_bundle() { assert!(bundle.signing_certificate().is_some()); // Check tlog entries - assert_eq!(bundle.verification_material.tlog_entries.len(), 1); - let entry = &bundle.verification_material.tlog_entries[0]; - assert_eq!(entry.log_index, LogIndex::new("25915956".to_string())); - assert_eq!(entry.integrated_time, "1712085549"); - assert_eq!(entry.kind_version.kind, "hashedrekord"); - assert_eq!(entry.kind_version.version, "0.0.1"); + let vm = bundle.verification_material.as_ref().unwrap(); + assert_eq!(vm.tlog_entries.len(), 1); + let entry = &vm.tlog_entries[0]; + assert_eq!(entry.log_index, 25915956_i64); + assert_eq!(entry.integrated_time, 1712085549_i64); + let kv = entry.kind_version.as_ref().unwrap(); + assert_eq!(kv.kind, "hashedrekord"); + assert_eq!(kv.version, "0.0.1"); // Check inclusion proof exists assert!(bundle.has_inclusion_proof()); @@ -84,8 +86,8 @@ fn test_parse_v3_bundle() { // Check inclusion proof details let proof = entry.inclusion_proof.as_ref().unwrap(); - assert_eq!(proof.log_index, LogIndex::new("25901137".to_string())); - assert_eq!(proof.tree_size, "25901138"); + assert_eq!(proof.log_index, 25901137_i64); + assert_eq!(proof.tree_size, 25901138_i64); assert_eq!(proof.hashes.len(), 11); } @@ -115,11 +117,12 @@ fn test_validate_v3_bundle_with_options() { #[test] fn test_v3_bundle_checkpoint_parsing() { let bundle = Bundle::from_json(BUNDLE_V3_JSON).unwrap(); - let entry = &bundle.verification_material.tlog_entries[0]; + let vm = bundle.verification_material.as_ref().unwrap(); + let entry = &vm.tlog_entries[0]; let proof = entry.inclusion_proof.as_ref().unwrap(); - // Parse the checkpoint - let checkpoint = proof.checkpoint.parse().unwrap(); + // Parse the checkpoint using the extension trait + let checkpoint = proof.parse_checkpoint().unwrap(); assert_eq!( checkpoint.origin, @@ -134,21 +137,24 @@ fn test_v3_bundle_message_signature() { let bundle = Bundle::from_json(BUNDLE_V3_JSON).unwrap(); match &bundle.content { - sigstore_types::bundle::SignatureContent::MessageSignature(sig) => { + Some(BundleContent::MessageSignature(sig)) => { // Check signature is present - assert!(!sig.signature.as_bytes().is_empty()); + assert!(!sig.signature.is_empty()); // Check message digest let digest = sig.message_digest.as_ref().unwrap(); assert_eq!( - digest.algorithm, - sigstore_types::hash::HashAlgorithm::Sha2256 + digest.algorithm(), + sigstore_types::ProtoHashAlgorithm::Sha2256 ); - assert!(!digest.digest.as_bytes().is_empty()); + assert!(!digest.digest.is_empty()); } - sigstore_types::bundle::SignatureContent::DsseEnvelope(_) => { + Some(BundleContent::DsseEnvelope(_)) => { panic!("Expected MessageSignature, got DsseEnvelope"); } + None => { + panic!("Expected MessageSignature, got None"); + } } } @@ -164,10 +170,9 @@ fn test_v3_bundle_serialization_roundtrip() { // Compare assert_eq!(bundle.media_type, bundle2.media_type); - assert_eq!( - bundle.verification_material.tlog_entries.len(), - bundle2.verification_material.tlog_entries.len() - ); + let vm1 = bundle.verification_material.as_ref().unwrap(); + let vm2 = 
bundle2.verification_material.as_ref().unwrap(); + assert_eq!(vm1.tlog_entries.len(), vm2.tlog_entries.len()); } #[test] @@ -206,28 +211,35 @@ fn test_v3_bundle_certificate_extraction() { #[test] fn test_inclusion_proof_verification() { use sigstore_merkle::{hash_leaf, verify_inclusion_proof}; + use sigstore_types::Sha256Hash; let bundle = Bundle::from_json(BUNDLE_V3_JSON).unwrap(); - let entry = &bundle.verification_material.tlog_entries[0]; + let vm = bundle.verification_material.as_ref().unwrap(); + let entry = &vm.tlog_entries[0]; let proof = entry.inclusion_proof.as_ref().unwrap(); - // Get the canonicalized body bytes - let body = entry.canonicalized_body.as_bytes(); + // Get the canonicalized body bytes (now Vec) + let body = &entry.canonicalized_body; // Hash the leaf let leaf_hash = hash_leaf(body); - // Proof hashes are already Vec - let proof_hashes = &proof.hashes; + // Proof hashes are Vec>, convert to Vec + let proof_hashes: Vec = proof + .hashes + .iter() + .map(|h| Sha256Hash::try_from_slice(h).unwrap()) + .collect(); - // Root hash is already a Sha256Hash - let root_hash = &proof.root_hash; + // Root hash is Vec, convert to Sha256Hash + let root_hash = Sha256Hash::try_from_slice(&proof.root_hash).unwrap(); // Verify the inclusion proof - let leaf_index: u64 = proof.log_index.as_u64().unwrap(); - let tree_size: u64 = proof.tree_size.parse().unwrap(); + let leaf_index: u64 = proof.log_index as u64; + let tree_size: u64 = proof.tree_size as u64; - let result = verify_inclusion_proof(&leaf_hash, leaf_index, tree_size, proof_hashes, root_hash); + let result = + verify_inclusion_proof(&leaf_hash, leaf_index, tree_size, &proof_hashes, &root_hash); assert!( result.is_ok(), diff --git a/crates/sigstore-bundle/tests/bundle_versions_tests.rs b/crates/sigstore-bundle/tests/bundle_versions_tests.rs index c0ce6ae..1cf9efb 100644 --- a/crates/sigstore-bundle/tests/bundle_versions_tests.rs +++ b/crates/sigstore-bundle/tests/bundle_versions_tests.rs @@ -3,7 +3,7 @@ //! These tests use real bundle fixtures from sigstore-rs and sigstore-python. 
use sigstore_bundle::{validate_bundle, validate_bundle_with_options, ValidationOptions}; -use sigstore_types::Bundle; +use sigstore_types::{Bundle, BundleExt}; /// v0.1 bundle with x509CertificateChain (from sigstore-rs) const V01_BUNDLE: &str = r#"{ @@ -72,12 +72,11 @@ fn test_parse_v01_bundle() { assert_eq!(version, sigstore_types::MediaType::Bundle0_1); // Check it has x509CertificateChain (not single certificate) - match &bundle.verification_material.content { - sigstore_types::bundle::VerificationMaterialContent::X509CertificateChain { - certificates, - } => { + let vm = bundle.verification_material.as_ref().unwrap(); + match &vm.content { + Some(sigstore_types::bundle::VerificationMaterialContent::X509CertificateChain(chain)) => { assert!( - !certificates.is_empty(), + !chain.certificates.is_empty(), "Certificate chain should not be empty" ); } @@ -133,10 +132,11 @@ fn test_parse_v03_bundle() { assert_eq!(version, sigstore_types::MediaType::Bundle0_3); // Check it has single certificate (not chain) - match &bundle.verification_material.content { - sigstore_types::bundle::VerificationMaterialContent::Certificate(cert) => { + let vm = bundle.verification_material.as_ref().unwrap(); + match &vm.content { + Some(sigstore_types::bundle::VerificationMaterialContent::Certificate(cert)) => { assert!( - !cert.raw_bytes.as_bytes().is_empty(), + !cert.raw_bytes.is_empty(), "Certificate should not be empty" ); } @@ -161,7 +161,8 @@ fn test_v03_bundle_with_timestamp() { let bundle = Bundle::from_json(V03_BUNDLE_WITH_TIMESTAMP).expect("Failed to parse bundle"); // Should have timestamp verification data - let tvd = &bundle.verification_material.timestamp_verification_data; + let vm = bundle.verification_material.as_ref().unwrap(); + let tvd = vm.timestamp_verification_data.as_ref().unwrap(); assert!( !tvd.rfc3161_timestamps.is_empty(), "Should have RFC3161 timestamps" @@ -193,10 +194,9 @@ fn test_v03_bundle_serialization_roundtrip() { // Compare assert_eq!(bundle.media_type, bundle2.media_type); - assert_eq!( - bundle.verification_material.tlog_entries.len(), - bundle2.verification_material.tlog_entries.len() - ); + let vm1 = bundle.verification_material.as_ref().unwrap(); + let vm2 = bundle2.verification_material.as_ref().unwrap(); + assert_eq!(vm1.tlog_entries.len(), vm2.tlog_entries.len()); } // ==== Version Comparison Tests ==== @@ -207,15 +207,17 @@ fn test_v01_vs_v03_certificate_format() { let v03 = Bundle::from_json(V03_BUNDLE_WITH_PROOF).expect("Failed to parse v0.3"); // v0.1 uses X509CertificateChain + let vm01 = v01.verification_material.as_ref().unwrap(); assert!(matches!( - v01.verification_material.content, - sigstore_types::bundle::VerificationMaterialContent::X509CertificateChain { .. 
} + &vm01.content, + Some(sigstore_types::bundle::VerificationMaterialContent::X509CertificateChain(_)) )); // v0.3 uses single Certificate + let vm03 = v03.verification_material.as_ref().unwrap(); assert!(matches!( - v03.verification_material.content, - sigstore_types::bundle::VerificationMaterialContent::Certificate(_) + &vm03.content, + Some(sigstore_types::bundle::VerificationMaterialContent::Certificate(_)) )); } diff --git a/crates/sigstore-conformance/src/main.rs b/crates/sigstore-conformance/src/main.rs index 0f4a5a1..e9e05ec 100644 --- a/crates/sigstore-conformance/src/main.rs +++ b/crates/sigstore-conformance/src/main.rs @@ -10,9 +10,9 @@ use sigstore_crypto::KeyPair; use sigstore_fulcio::FulcioClient; use sigstore_oidc::IdentityToken; use sigstore_rekor::RekorClient; -use sigstore_trust_root::{SigningConfig, TrustedRoot}; +use sigstore_trust_root::{SigningConfig, SigningConfigExt, TrustedRoot, TrustedRootExt}; use sigstore_tsa::TimestampClient; -use sigstore_types::{Bundle, Sha256Hash, SignatureContent}; +use sigstore_types::{Bundle, BundleExt, Sha256Hash}; use sigstore_verify::{verify, VerificationPolicy}; use std::env; @@ -189,7 +189,7 @@ async fn sign_bundle(args: &[String]) -> Result<(), Box> // Timestamp the signature let timestamp = tsa_client.timestamp_signature(&signature).await?; - bundle = bundle.with_rfc3161_timestamp(timestamp); + bundle = bundle.with_rfc3161_timestamp(timestamp.as_bytes().to_vec()); } let bundle = bundle.into_bundle(); @@ -298,18 +298,18 @@ fn verify_bundle(args: &[String]) -> Result<(), Box> { } // Extract expected hash from bundle + use sigstore_types::BundleContent; let expected_hash = match &bundle.content { - SignatureContent::MessageSignature(msg_sig) => { + Some(BundleContent::MessageSignature(msg_sig)) => { if let Some(digest) = &msg_sig.message_digest { - digest.digest.as_bytes().to_vec() + digest.digest.clone() } else { return Err("Bundle does not contain message digest for verification".into()); } } - SignatureContent::DsseEnvelope(envelope) => { + Some(BundleContent::DsseEnvelope(envelope)) => { if envelope.payload_type == "application/vnd.in-toto+json" { - let payload_bytes = envelope.payload.as_bytes(); - let payload_str = String::from_utf8(payload_bytes.to_vec()) + let payload_str = String::from_utf8(envelope.payload.clone()) .map_err(|e| format!("Invalid UTF-8 in payload: {}", e))?; let statement: serde_json::Value = serde_json::from_str(&payload_str) .map_err(|e| format!("Failed to parse statement: {}", e))?; @@ -337,6 +337,9 @@ fn verify_bundle(args: &[String]) -> Result<(), Box> { return Err("DSSE envelope does not contain in-toto statement".into()); } } + None => { + return Err("Bundle does not contain any content".into()); + } }; // Verify that the provided digest matches the one in the bundle diff --git a/crates/sigstore-rekor/src/client.rs b/crates/sigstore-rekor/src/client.rs index d86d744..32dbc71 100644 --- a/crates/sigstore-rekor/src/client.rs +++ b/crates/sigstore-rekor/src/client.rs @@ -245,14 +245,20 @@ impl RekorClient { root_hash: p.root_hash.to_hex(), tree_size: p.tree_size.parse::().unwrap_or_default(), }), - signed_entry_timestamp: entry_v2.inclusion_promise.map(|p| p.signed_entry_timestamp), + // Convert Vec to SignedTimestamp + signed_entry_timestamp: entry_v2 + .inclusion_promise + .map(|p| sigstore_types::SignedTimestamp::from_bytes(&p.signed_entry_timestamp)), }); + // Convert Vec log key ID to hex string (HexLogId expects hex, not base64) + let log_id_hex = hex::encode(&entry_v2.log_id.key_id); + Ok(LogEntry 
{ uuid: Default::default(), // V2 response doesn't include UUID in body body: entry_v2.canonicalized_body, integrated_time, - log_id: entry_v2.log_id.key_id.into_string().into(), + log_id: log_id_hex.into(), log_index, verification, }) @@ -335,14 +341,20 @@ impl RekorClient { root_hash: p.root_hash.to_hex(), tree_size: p.tree_size.parse::().unwrap_or_default(), }), - signed_entry_timestamp: entry_v2.inclusion_promise.map(|p| p.signed_entry_timestamp), + // Convert Vec to SignedTimestamp + signed_entry_timestamp: entry_v2 + .inclusion_promise + .map(|p| sigstore_types::SignedTimestamp::from_bytes(&p.signed_entry_timestamp)), }); + // Convert Vec log key ID to hex string (HexLogId expects hex, not base64) + let log_id_hex = hex::encode(&entry_v2.log_id.key_id); + Ok(LogEntry { uuid: Default::default(), // V2 response doesn't include UUID in body body: entry_v2.canonicalized_body, integrated_time, - log_id: entry_v2.log_id.key_id.into_string().into(), + log_id: log_id_hex.into(), log_index, verification, }) diff --git a/crates/sigstore-rekor/src/entry.rs b/crates/sigstore-rekor/src/entry.rs index 1f4dea8..fd54fa5 100644 --- a/crates/sigstore-rekor/src/entry.rs +++ b/crates/sigstore-rekor/src/entry.rs @@ -2,11 +2,31 @@ use serde::{Deserialize, Serialize}; use sigstore_types::{ - CanonicalizedBody, CheckpointData, DerCertificate, EntryUuid, HashAlgorithm, HexLogId, - InclusionPromise, KindVersion, LogId, PemContent, Sha256Hash, SignatureBytes, SignedTimestamp, + CanonicalizedBody, DerCertificate, EntryUuid, HashAlgorithm, HexLogId, KindVersion, LogId, + PemContent, Sha256Hash, SignatureBytes, SignedTimestamp, }; use std::collections::HashMap; +/// Inclusion promise (SET) from Rekor response +/// Re-exported from protobuf for convenience +pub use sigstore_types::InclusionPromise; + +/// Checkpoint data in Rekor V2 inclusion proof +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct CheckpointData { + /// Text representation of the checkpoint + #[serde(default)] + pub envelope: String, +} + +impl CheckpointData { + /// Check if checkpoint data is empty + pub fn is_empty(&self) -> bool { + self.envelope.is_empty() + } +} + /// Rekor API version #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] pub enum RekorApiVersion { diff --git a/crates/sigstore-sign/examples/sign_blob.rs b/crates/sigstore-sign/examples/sign_blob.rs index 192b1d0..f81d60a 100644 --- a/crates/sigstore-sign/examples/sign_blob.rs +++ b/crates/sigstore-sign/examples/sign_blob.rs @@ -39,6 +39,7 @@ use sigstore_oidc::{get_ambient_token, get_identity_token, is_ci_environment, IdentityToken}; use sigstore_rekor::RekorApiVersion; use sigstore_sign::{SigningConfig, SigningContext}; +use sigstore_types::BundleExt; use std::env; use std::fs; @@ -189,15 +190,20 @@ async fn main() { println!(" Media Type: {}", bundle.media_type); // Print tlog entry info - if let Some(entry) = bundle.verification_material.tlog_entries.first() { - println!( - " Entry Kind: {} v{}", - entry.kind_version.kind, entry.kind_version.version - ); - println!(" Log Index: {}", entry.log_index); - // For V2, integrated_time is always 0 - RFC3161 timestamps are used instead - if let Ok(ts) = entry.integrated_time.parse::() { - if ts == 0 && entry.kind_version.version == "0.0.2" { + if let Some(vm) = bundle.verification_material.as_ref() { + if let Some(entry) = vm.tlog_entries.first() { + if let Some(kv) = entry.kind_version.as_ref() { + println!(" Entry Kind: {} v{}", kv.kind, kv.version); + } + 
println!(" Log Index: {}", entry.log_index); + // For V2, integrated_time is always 0 - RFC3161 timestamps are used instead + let ts = entry.integrated_time; + let version = entry + .kind_version + .as_ref() + .map(|kv| kv.version.as_str()) + .unwrap_or(""); + if ts == 0 && version == "0.0.2" { println!(" Integrated Time: (V2 uses RFC3161 timestamps)"); } else { use chrono::{DateTime, Utc}; @@ -205,21 +211,22 @@ async fn main() { println!(" Integrated Time: {}", dt); } } - } - // Show if we have inclusion proof (V2) vs just promise (V1) - if entry.inclusion_proof.is_some() { - println!(" Inclusion Proof: yes (with checkpoint)"); - } else if entry.inclusion_promise.is_some() { - println!(" Inclusion Promise: yes (SET)"); + // Show if we have inclusion proof (V2) vs just promise (V1) + if entry.inclusion_proof.is_some() { + println!(" Inclusion Proof: yes (with checkpoint)"); + } else if entry.inclusion_promise.is_some() { + println!(" Inclusion Promise: yes (SET)"); + } } } // Print RFC3161 timestamp info let ts_count = bundle .verification_material - .timestamp_verification_data - .rfc3161_timestamps - .len(); + .as_ref() + .and_then(|vm| vm.timestamp_verification_data.as_ref()) + .map(|tvd| tvd.rfc3161_timestamps.len()) + .unwrap_or(0); if ts_count > 0 { println!(" RFC3161 Timestamps: {}", ts_count); } else { diff --git a/crates/sigstore-sign/src/lib.rs b/crates/sigstore-sign/src/lib.rs index a049b14..2bc67e0 100644 --- a/crates/sigstore-sign/src/lib.rs +++ b/crates/sigstore-sign/src/lib.rs @@ -7,6 +7,7 @@ //! ```no_run //! use sigstore_sign::{SigningContext, SigningConfig}; //! use sigstore_oidc::IdentityToken; +//! use sigstore_types::BundleExt; //! //! # async fn example() -> Result<(), Box> { //! let context = SigningContext::production(); diff --git a/crates/sigstore-sign/src/sign.rs b/crates/sigstore-sign/src/sign.rs index c9478b0..01a8c68 100644 --- a/crates/sigstore-sign/src/sign.rs +++ b/crates/sigstore-sign/src/sign.rs @@ -10,10 +10,10 @@ use sigstore_oidc::IdentityToken; use sigstore_rekor::{ DsseEntry, DsseEntryV2, HashedRekord, HashedRekordV2, RekorApiVersion, RekorClient, }; -use sigstore_trust_root::SigningConfig as TufSigningConfig; +use sigstore_trust_root::{SigningConfig as TufSigningConfig, SigningConfigExt}; use sigstore_tsa::TimestampClient; use sigstore_types::{ - Artifact, Bundle, DerCertificate, DsseEnvelope, DsseSignature, KeyId, PayloadBytes, Sha256Hash, + Artifact, Bundle, DerCertificate, DsseEnvelope, DsseSignature, PayloadBytes, Sha256Hash, SignatureBytes, Subject, TimestampToken, }; @@ -252,7 +252,7 @@ impl Signer { .with_tlog_entry(tlog_entry.build()); if let Some(ts) = timestamp { - bundle = bundle.with_rfc3161_timestamp(ts); + bundle = bundle.with_rfc3161_timestamp(ts.as_bytes().to_vec()); } Ok(bundle.into_bundle()) @@ -388,14 +388,14 @@ impl Signer { let pae = sigstore_types::pae(&payload_type, statement_json.as_bytes()); let signature = key_pair.sign(&pae)?; - let dsse_envelope = DsseEnvelope::new( + let dsse_envelope = DsseEnvelope { payload_type, - payload, - vec![DsseSignature { - sig: signature.clone(), - keyid: KeyId::default(), + payload: payload.as_bytes().to_vec(), + signatures: vec![DsseSignature { + sig: signature.as_bytes().to_vec(), + keyid: String::new(), }], - ); + }; // 5. 
Create DSSE Rekor entry let tlog_entry = self @@ -414,7 +414,7 @@ impl Signer { .with_tlog_entry(tlog_entry.build()); if let Some(ts) = timestamp { - bundle = bundle.with_rfc3161_timestamp(ts); + bundle = bundle.with_rfc3161_timestamp(ts.as_bytes().to_vec()); } Ok(bundle.into_bundle()) diff --git a/crates/sigstore-trust-root/Cargo.toml b/crates/sigstore-trust-root/Cargo.toml index 64910b6..dab3b0a 100644 --- a/crates/sigstore-trust-root/Cargo.toml +++ b/crates/sigstore-trust-root/Cargo.toml @@ -23,6 +23,9 @@ thiserror = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } +# Sigstore protobuf specs +sigstore_protobuf_specs = { workspace = true } + # Crypto & Certificates base64 = { workspace = true } x509-cert = { workspace = true } diff --git a/crates/sigstore-trust-root/src/lib.rs b/crates/sigstore-trust-root/src/lib.rs index 0683186..b6657ad 100644 --- a/crates/sigstore-trust-root/src/lib.rs +++ b/crates/sigstore-trust-root/src/lib.rs @@ -1,7 +1,7 @@ //! Sigstore trusted root parsing and management //! //! This crate provides functionality to parse and manage Sigstore trusted root bundles -//! and signing configuration. +//! and signing configuration, using the official protobuf specs. //! //! ## Trusted Root //! @@ -22,13 +22,12 @@ //! # Features //! //! - `tuf` - Enable TUF (The Update Framework) support for securely fetching -//! trusted roots from Sigstore's TUF repository. This adds async methods -//! like [`TrustedRoot::from_tuf()`] and [`TrustedRoot::from_tuf_staging()`]. +//! trusted roots from Sigstore's TUF repository. //! //! # Example //! //! ```no_run -//! use sigstore_trust_root::{TrustedRoot, SigningConfig}; +//! use sigstore_trust_root::{TrustedRoot, TrustedRootExt, SigningConfig, SigningConfigExt}; //! //! // Load embedded production trusted root //! let root = TrustedRoot::production().unwrap(); @@ -45,7 +44,7 @@ //! With the `tuf` feature enabled: //! //! ```ignore -//! use sigstore_trust_root::{TrustedRoot, SigningConfig}; +//! use sigstore_trust_root::{TrustedRoot, TrustedRootExt, SigningConfig, SigningConfigExt}; //! //! // Fetch via TUF protocol (secure, up-to-date) //! 
let root = TrustedRoot::from_tuf().await?; @@ -60,18 +59,23 @@ pub mod trusted_root; pub mod tuf; pub use error::{Error, Result}; + +// Re-export protobuf types and extension traits from signing_config pub use signing_config::{ - ServiceConfiguration, ServiceEndpoint, ServiceSelector, ServiceValidityPeriod, SigningConfig, + Service, ServiceConfiguration, ServiceExt, ServiceSelector, SigningConfig, SigningConfigExt, SIGNING_CONFIG_MEDIA_TYPE, SIGSTORE_PRODUCTION_SIGNING_CONFIG, SIGSTORE_STAGING_SIGNING_CONFIG, SUPPORTED_FULCIO_VERSIONS, SUPPORTED_REKOR_VERSIONS, SUPPORTED_TSA_VERSIONS, }; + +// Re-export protobuf types and extension traits from trusted_root pub use trusted_root::{ - CertificateAuthority, CertificateTransparencyLog, TimestampAuthority, TransparencyLog, - TrustedRoot, ValidityPeriod, SIGSTORE_PRODUCTION_TRUSTED_ROOT, SIGSTORE_STAGING_TRUSTED_ROOT, + CertificateAuthority, DistinguishedName, ProtoHashAlgorithm, ProtoLogId, PublicKey, TimeRange, + TransparencyLogInstance, TrustedRoot, TrustedRootExt, X509Certificate, X509CertificateChain, + SIGSTORE_PRODUCTION_TRUSTED_ROOT, SIGSTORE_STAGING_TRUSTED_ROOT, }; #[cfg(feature = "tuf")] pub use tuf::{ - TufConfig, DEFAULT_TUF_URL, PRODUCTION_TUF_ROOT, SIGNING_CONFIG_TARGET, STAGING_TUF_ROOT, - STAGING_TUF_URL, TRUSTED_ROOT_TARGET, + TufConfig, TufSigningConfigExt, TufTrustedRootExt, DEFAULT_TUF_URL, PRODUCTION_TUF_ROOT, + SIGNING_CONFIG_TARGET, STAGING_TUF_ROOT, STAGING_TUF_URL, TRUSTED_ROOT_TARGET, }; diff --git a/crates/sigstore-trust-root/src/signing_config.rs b/crates/sigstore-trust-root/src/signing_config.rs index 9830f55..213f0cb 100644 --- a/crates/sigstore-trust-root/src/signing_config.rs +++ b/crates/sigstore-trust-root/src/signing_config.rs @@ -1,31 +1,17 @@ //! Signing configuration for Sigstore instances //! -//! This module provides functionality to parse and manage Sigstore signing configuration -//! which specifies the service endpoints for signing operations: -//! - Fulcio CA URLs for certificate issuance -//! - Rekor transparency log URLs for log entry submission -//! - TSA URLs for RFC 3161 timestamp requests -//! - OIDC provider URLs for authentication -//! -//! # Example -//! -//! ```no_run -//! use sigstore_trust_root::SigningConfig; -//! -//! // Load embedded production signing config -//! let config = SigningConfig::production().unwrap(); -//! -//! // Get the best Rekor endpoint (highest available version) -//! if let Some(rekor) = config.get_rekor_url(None) { -//! println!("Rekor URL: {} (v{})", rekor.url, rekor.major_api_version); -//! } -//! ``` +//! This module re-exports the official Sigstore protobuf types and provides +//! extension methods for selecting service endpoints. 
-use chrono::{DateTime, Utc}; -use serde::{Deserialize, Serialize}; +use chrono::{DateTime, TimeZone, Utc}; use crate::{Error, Result}; +// Re-export protobuf types +pub use sigstore_protobuf_specs::dev::sigstore::trustroot::v1::{ + Service, ServiceConfiguration, ServiceSelector, SigningConfig, +}; + /// Embedded production signing config pub const SIGSTORE_PRODUCTION_SIGNING_CONFIG: &str = include_str!("../repository/signing_config.json"); @@ -46,140 +32,133 @@ pub const SUPPORTED_FULCIO_VERSIONS: &[u32] = &[1]; /// Expected media type for signing config v0.2 pub const SIGNING_CONFIG_MEDIA_TYPE: &str = "application/vnd.dev.sigstore.signingconfig.v0.2+json"; -/// Validity period for a service -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ServiceValidityPeriod { - /// Start time of validity - pub start: DateTime, - /// End time of validity (optional, None means still valid) - #[serde(default)] - pub end: Option>, +/// Extension trait for Service with helper methods +pub trait ServiceExt { + /// Check if this service is currently valid + fn is_valid(&self) -> bool; + + /// Get the validity start time + fn valid_from(&self) -> Option>; + + /// Get the validity end time + fn valid_until(&self) -> Option>; } -impl ServiceValidityPeriod { - /// Check if this period is currently valid - pub fn is_valid(&self) -> bool { +impl ServiceExt for Service { + fn is_valid(&self) -> bool { let now = Utc::now(); - if now < self.start { - return false; + + let Some(valid_for) = &self.valid_for else { + return true; + }; + + // Check start time + if let Some(start) = &valid_for.start { + if let Some(start_dt) = Utc + .timestamp_opt(start.seconds, start.nanos as u32) + .single() + { + if now < start_dt { + return false; + } + } } - if let Some(end) = self.end { - if now >= end { - return false; + + // Check end time + if let Some(end) = &valid_for.end { + if let Some(end_dt) = Utc.timestamp_opt(end.seconds, end.nanos as u32).single() { + if now >= end_dt { + return false; + } } } + true } -} - -/// A service endpoint configuration -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ServiceEndpoint { - /// URL of the service - pub url: String, - /// Major API version supported by this endpoint - pub major_api_version: u32, - /// Validity period for this endpoint - pub valid_for: ServiceValidityPeriod, - /// Operator of this service - #[serde(default)] - pub operator: Option, -} -impl ServiceEndpoint { - /// Check if this endpoint is currently valid - pub fn is_valid(&self) -> bool { - self.valid_for.is_valid() + fn valid_from(&self) -> Option> { + self.valid_for.as_ref().and_then(|vf| { + vf.start + .as_ref() + .and_then(|t| Utc.timestamp_opt(t.seconds, t.nanos as u32).single()) + }) } -} -/// Service selector configuration -#[derive(Debug, Clone, Serialize, Deserialize, Default)] -#[serde(rename_all = "SCREAMING_SNAKE_CASE")] -pub enum ServiceSelector { - /// Use any available service - #[default] - Any, - /// Use exactly the specified number of services - Exact, + fn valid_until(&self) -> Option> { + self.valid_for.as_ref().and_then(|vf| { + vf.end + .as_ref() + .and_then(|t| Utc.timestamp_opt(t.seconds, t.nanos as u32).single()) + }) + } } -/// Service configuration -#[derive(Debug, Clone, Serialize, Deserialize, Default)] -pub struct ServiceConfiguration { - /// How to select services - #[serde(default)] - pub selector: ServiceSelector, - /// Number of services to use (for EXACT selector) - #[serde(default)] - 
pub count: Option, -} +/// Extension trait for SigningConfig with helper methods +pub trait SigningConfigExt { + /// Parse signing config from JSON + fn from_json(json: &str) -> Result; -/// Signing configuration for a Sigstore instance -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SigningConfig { - /// Media type of this configuration - pub media_type: String, - /// Fulcio CA URLs - #[serde(default)] - pub ca_urls: Vec, - /// OIDC provider URLs - #[serde(default)] - pub oidc_urls: Vec, - /// Rekor transparency log URLs - #[serde(default)] - pub rekor_tlog_urls: Vec, - /// Timestamp authority URLs - #[serde(default)] - pub tsa_urls: Vec, - /// Rekor tlog configuration - #[serde(default)] - pub rekor_tlog_config: ServiceConfiguration, - /// TSA configuration - #[serde(default)] - pub tsa_config: ServiceConfiguration, -} + /// Parse signing config from a file + fn from_file(path: &str) -> Result; -impl SigningConfig { /// Load the embedded production signing config - pub fn production() -> Result { - Self::from_json(SIGSTORE_PRODUCTION_SIGNING_CONFIG) - } + fn production() -> Result; /// Load the embedded staging signing config - pub fn staging() -> Result { - Self::from_json(SIGSTORE_STAGING_SIGNING_CONFIG) - } + fn staging() -> Result; - /// Parse signing config from JSON - pub fn from_json(json: &str) -> Result { + /// Get valid Rekor endpoints, optionally filtered by version + fn get_rekor_urls(&self, force_version: Option) -> Vec<&Service>; + + /// Get the best Rekor endpoint (highest version available) + fn get_rekor_url(&self, force_version: Option) -> Option<&Service>; + + /// Get valid Fulcio endpoints + fn get_fulcio_urls(&self) -> Vec<&Service>; + + /// Get the best Fulcio endpoint + fn get_fulcio_url(&self) -> Option<&Service>; + + /// Get valid TSA endpoints + fn get_tsa_urls(&self) -> Vec<&Service>; + + /// Get the best TSA endpoint + fn get_tsa_url(&self) -> Option<&Service>; + + /// Get valid OIDC provider URLs + fn get_oidc_urls(&self) -> Vec<&Service>; + + /// Get the best OIDC provider URL + fn get_oidc_url(&self) -> Option<&Service>; +} + +impl SigningConfigExt for SigningConfig { + fn from_json(json: &str) -> Result { let config: SigningConfig = serde_json::from_str(json)?; // Validate media type if config.media_type != SIGNING_CONFIG_MEDIA_TYPE { - return Err(Error::UnsupportedMediaType(config.media_type)); + return Err(Error::UnsupportedMediaType(config.media_type.clone())); } Ok(config) } - /// Parse signing config from a file - pub fn from_file(path: &str) -> Result { + fn from_file(path: &str) -> Result { let json = std::fs::read_to_string(path) .map_err(|e| Error::MissingField(format!("Failed to read file {}: {}", path, e)))?; Self::from_json(&json) } - /// Get valid Rekor endpoints, optionally filtered by version - /// - /// If `force_version` is Some, only returns endpoints with that major version. - /// Otherwise returns all valid endpoints for supported versions. - /// - /// Endpoints are sorted by version descending (highest first). 
- pub fn get_rekor_urls(&self, force_version: Option) -> Vec<&ServiceEndpoint> { + fn production() -> Result { + Self::from_json(SIGSTORE_PRODUCTION_SIGNING_CONFIG) + } + + fn staging() -> Result { + Self::from_json(SIGSTORE_STAGING_SIGNING_CONFIG) + } + + fn get_rekor_urls(&self, force_version: Option) -> Vec<&Service> { let mut endpoints: Vec<_> = self .rekor_tlog_urls .iter() @@ -205,46 +184,37 @@ impl SigningConfig { endpoints } - /// Get the best Rekor endpoint (highest version available) - /// - /// If `force_version` is Some, returns the first endpoint with that version. - pub fn get_rekor_url(&self, force_version: Option) -> Option<&ServiceEndpoint> { + fn get_rekor_url(&self, force_version: Option) -> Option<&Service> { self.get_rekor_urls(force_version).first().copied() } - /// Get valid Fulcio endpoints - pub fn get_fulcio_urls(&self) -> Vec<&ServiceEndpoint> { + fn get_fulcio_urls(&self) -> Vec<&Service> { self.ca_urls .iter() .filter(|e| e.is_valid() && SUPPORTED_FULCIO_VERSIONS.contains(&e.major_api_version)) .collect() } - /// Get the best Fulcio endpoint - pub fn get_fulcio_url(&self) -> Option<&ServiceEndpoint> { + fn get_fulcio_url(&self) -> Option<&Service> { self.get_fulcio_urls().first().copied() } - /// Get valid TSA endpoints - pub fn get_tsa_urls(&self) -> Vec<&ServiceEndpoint> { + fn get_tsa_urls(&self) -> Vec<&Service> { self.tsa_urls .iter() .filter(|e| e.is_valid() && SUPPORTED_TSA_VERSIONS.contains(&e.major_api_version)) .collect() } - /// Get the best TSA endpoint - pub fn get_tsa_url(&self) -> Option<&ServiceEndpoint> { + fn get_tsa_url(&self) -> Option<&Service> { self.get_tsa_urls().first().copied() } - /// Get valid OIDC provider URLs - pub fn get_oidc_urls(&self) -> Vec<&ServiceEndpoint> { + fn get_oidc_urls(&self) -> Vec<&Service> { self.oidc_urls.iter().filter(|e| e.is_valid()).collect() } - /// Get the best OIDC provider URL - pub fn get_oidc_url(&self) -> Option<&ServiceEndpoint> { + fn get_oidc_url(&self) -> Option<&Service> { self.get_oidc_urls().first().copied() } } @@ -292,27 +262,4 @@ mod tests { assert_eq!(rekor.major_api_version, 2); } } - - #[test] - fn test_service_validity() { - let valid_period = ServiceValidityPeriod { - start: DateTime::parse_from_rfc3339("2020-01-01T00:00:00Z") - .unwrap() - .into(), - end: None, - }; - assert!(valid_period.is_valid()); - - let expired_period = ServiceValidityPeriod { - start: DateTime::parse_from_rfc3339("2020-01-01T00:00:00Z") - .unwrap() - .into(), - end: Some( - DateTime::parse_from_rfc3339("2021-01-01T00:00:00Z") - .unwrap() - .into(), - ), - }; - assert!(!expired_period.is_valid()); - } } diff --git a/crates/sigstore-trust-root/src/trusted_root.rs b/crates/sigstore-trust-root/src/trusted_root.rs index e05d1ea..0fca681 100644 --- a/crates/sigstore-trust-root/src/trusted_root.rs +++ b/crates/sigstore-trust-root/src/trusted_root.rs @@ -1,12 +1,22 @@ //! Trusted root types and parsing +//! +//! This module re-exports the official Sigstore protobuf types and provides +//! extension methods for common operations. 
use crate::{Error, Result}; -use chrono::{DateTime, Utc}; +use chrono::{DateTime, TimeZone, Utc}; use rustls_pki_types::CertificateDer; -use serde::{Deserialize, Serialize}; -use sigstore_types::{DerCertificate, DerPublicKey, HashAlgorithm, KeyHint, LogId, LogKeyId}; use std::collections::HashMap; +// Re-export protobuf types +pub use sigstore_protobuf_specs::dev::sigstore::{ + common::v1::{ + DistinguishedName, HashAlgorithm as ProtoHashAlgorithm, LogId as ProtoLogId, PublicKey, + TimeRange, X509Certificate, X509CertificateChain, + }, + trustroot::v1::{CertificateAuthority, TransparencyLogInstance, TrustedRoot}, +}; + /// TSA certificate with optional validity period (start, end) pub type TsaCertWithValidity = ( CertificateDer<'static>, @@ -14,380 +24,274 @@ pub type TsaCertWithValidity = ( Option>, ); -/// A trusted root bundle containing all trust anchors -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct TrustedRoot { - /// Media type of the trusted root - pub media_type: String, - - /// Transparency logs (Rekor) - #[serde(default)] - pub tlogs: Vec, - - /// Certificate authorities (Fulcio) - #[serde(default)] - pub certificate_authorities: Vec, - - /// Certificate Transparency logs - #[serde(default)] - pub ctlogs: Vec, - - /// Timestamp authorities (RFC 3161 TSAs) - #[serde(default)] - pub timestamp_authorities: Vec, -} - -/// A transparency log entry (Rekor) -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct TransparencyLog { - /// Base URL of the transparency log - pub base_url: String, - - /// Hash algorithm used - pub hash_algorithm: HashAlgorithm, - - /// Public key for verification - pub public_key: PublicKey, - - /// Log ID - pub log_id: LogId, -} - -/// A certificate authority entry (Fulcio) -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct CertificateAuthority { - /// Subject information - #[serde(default)] - pub subject: CertificateSubject, +/// Embedded production trusted root from +/// This is the default trusted root for Sigstore's public production instance. +pub const SIGSTORE_PRODUCTION_TRUSTED_ROOT: &str = include_str!("trusted_root.json"); - /// URI of the CA - pub uri: String, +/// Embedded staging trusted root from +/// This is the trusted root for Sigstore's staging/testing instance. 
+pub const SIGSTORE_STAGING_TRUSTED_ROOT: &str = include_str!("trusted_root_staging.json"); - /// Certificate chain - pub cert_chain: CertChain, +/// Extension trait for TrustedRoot with helper methods +pub trait TrustedRootExt { + /// Parse a trusted root from JSON + fn from_json(json: &str) -> Result; - /// Validity period - #[serde(default)] - pub valid_for: Option, -} + /// Load a trusted root from a file + fn from_file(path: impl AsRef) -> Result; -/// A Certificate Transparency log entry -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct CertificateTransparencyLog { - /// Base URL of the CT log - pub base_url: String, + /// Load the default Sigstore production trusted root + fn production() -> Result; - /// Hash algorithm used - pub hash_algorithm: HashAlgorithm, + /// Load the Sigstore staging trusted root + fn staging() -> Result; - /// Public key for verification - pub public_key: PublicKey, + /// Get all Fulcio certificate authority certificates + fn fulcio_certs(&self) -> Result>>; - /// Log ID - pub log_id: LogId, -} + /// Get all Rekor public keys mapped by key ID (hex-encoded) + fn rekor_keys(&self) -> Result>>; -/// A timestamp authority entry -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct TimestampAuthority { - /// Subject information - #[serde(default)] - pub subject: CertificateSubject, + /// Get all Rekor public keys with their key hints (4-byte identifiers) + fn rekor_keys_with_hints(&self) -> Result)>>; - /// URI of the TSA - #[serde(default)] - pub uri: Option, + /// Get a specific Rekor public key by log ID (base64-encoded) + fn rekor_key_for_log(&self, log_id: &str) -> Result>; - /// Certificate chain - pub cert_chain: CertChain, + /// Get all Certificate Transparency log public keys mapped by key ID + fn ctfe_keys(&self) -> Result>>; - /// Validity period - #[serde(default)] - pub valid_for: Option, -} + /// Get all Certificate Transparency log public keys with their SHA-256 log IDs + fn ctfe_keys_with_ids(&self) -> Result, Vec)>>; -/// Public key information -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct PublicKey { - /// Raw bytes of the public key (DER-encoded) - pub raw_bytes: DerPublicKey, + /// Get all TSA certificates with their validity periods + fn tsa_certs_with_validity(&self) -> Result>; - /// Key details/type - pub key_details: String, + /// Get TSA root certificates (for chain validation) + fn tsa_root_certs(&self) -> Result>>; - /// Validity period for this key - #[serde(default)] - pub valid_for: Option, -} + /// Get TSA intermediate certificates (for chain validation) + fn tsa_intermediate_certs(&self) -> Result>>; -/// Subject information for a certificate. -/// -/// Note: This is different from `sigstore_types::Subject` which represents -/// an in-toto Statement subject (artifact name + digest). 
-#[derive(Debug, Clone, Default, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct CertificateSubject { - /// Organization name - #[serde(default)] - pub organization: Option, - - /// Common name - #[serde(default)] - pub common_name: Option, -} + /// Get TSA leaf certificates (the first certificate in each chain) + fn tsa_leaf_certs(&self) -> Result>>; -/// Certificate chain -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct CertChain { - /// Certificates in the chain - pub certificates: Vec, -} + /// Check if a Rekor key ID exists in the trusted root + fn has_rekor_key(&self, key_id: &str) -> bool; -/// A certificate entry -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct CertificateEntry { - /// Raw bytes of the certificate (DER-encoded) - pub raw_bytes: DerCertificate, + /// Check if a timestamp is within any TSA's validity period + fn is_timestamp_within_tsa_validity(&self, timestamp: DateTime) -> bool; } -/// Validity period for a key or certificate -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct ValidityPeriod { - /// Start time (ISO 8601) - #[serde(default)] - pub start: Option, - - /// End time (ISO 8601) - #[serde(default)] - pub end: Option, +/// Convert protobuf TimeRange to chrono DateTime +fn time_range_to_datetimes( + range: Option<&TimeRange>, +) -> (Option>, Option>) { + let Some(range) = range else { + return (None, None); + }; + + let start = range + .start + .as_ref() + .and_then(|t| Utc.timestamp_opt(t.seconds, t.nanos as u32).single()); + + let end = range + .end + .as_ref() + .and_then(|t| Utc.timestamp_opt(t.seconds, t.nanos as u32).single()); + + (start, end) } -impl TrustedRoot { - /// Parse a trusted root from JSON - pub fn from_json(json: &str) -> Result { +impl TrustedRootExt for TrustedRoot { + fn from_json(json: &str) -> Result { Ok(serde_json::from_str(json)?) 
} - /// Load a trusted root from a file - pub fn from_file(path: impl AsRef) -> Result { + fn from_file(path: impl AsRef) -> Result { let json = std::fs::read_to_string(path).map_err(|e| Error::Json(serde_json::Error::io(e)))?; Self::from_json(&json) } - /// Get all Fulcio certificate authority certificates - pub fn fulcio_certs(&self) -> Result>> { + fn production() -> Result { + Self::from_json(SIGSTORE_PRODUCTION_TRUSTED_ROOT) + } + + fn staging() -> Result { + Self::from_json(SIGSTORE_STAGING_TRUSTED_ROOT) + } + + fn fulcio_certs(&self) -> Result>> { let mut certs = Vec::new(); for ca in &self.certificate_authorities { - for cert_entry in &ca.cert_chain.certificates { - certs.push(CertificateDer::from(cert_entry.raw_bytes.as_bytes()).into_owned()); + if let Some(cert_chain) = &ca.cert_chain { + for cert in &cert_chain.certificates { + certs.push(CertificateDer::from(cert.raw_bytes.as_slice()).into_owned()); + } } } Ok(certs) } - /// Get all Rekor public keys mapped by key ID - pub fn rekor_keys(&self) -> Result>> { + fn rekor_keys(&self) -> Result>> { let mut keys = HashMap::new(); for tlog in &self.tlogs { - keys.insert( - tlog.log_id.key_id.to_string(), - tlog.public_key.raw_bytes.as_bytes().to_vec(), - ); + if let (Some(log_id), Some(public_key)) = (&tlog.log_id, &tlog.public_key) { + let key_id_hex = hex::encode(&log_id.key_id); + if let Some(raw_bytes) = &public_key.raw_bytes { + keys.insert(key_id_hex, raw_bytes.clone()); + } + } } Ok(keys) } - /// Get all Rekor public keys with their key hints (4-byte identifiers) - /// - /// Returns a vector of (key_hint, public_key) tuples where key_hint is - /// the first 4 bytes of the keyId from the log_id field. - pub fn rekor_keys_with_hints(&self) -> Result> { + fn rekor_keys_with_hints(&self) -> Result)>> { let mut keys = Vec::new(); for tlog in &self.tlogs { - // Decode the key_id to get the key hint (first 4 bytes) - let key_id_bytes = tlog.log_id.key_id.decode()?; - - if key_id_bytes.len() >= 4 { - let key_hint = KeyHint::new([ - key_id_bytes[0], - key_id_bytes[1], - key_id_bytes[2], - key_id_bytes[3], - ]); - keys.push((key_hint, tlog.public_key.raw_bytes.clone())); + if let (Some(log_id), Some(public_key)) = (&tlog.log_id, &tlog.public_key) { + if log_id.key_id.len() >= 4 { + let key_hint: [u8; 4] = [ + log_id.key_id[0], + log_id.key_id[1], + log_id.key_id[2], + log_id.key_id[3], + ]; + if let Some(raw_bytes) = &public_key.raw_bytes { + keys.push((key_hint, raw_bytes.clone())); + } + } } } Ok(keys) } - /// Get a specific Rekor public key by log ID - pub fn rekor_key_for_log(&self, log_id: &LogKeyId) -> Result { + fn rekor_key_for_log(&self, log_id: &str) -> Result> { + // Try to decode as base64 first, then hex + let log_id_bytes = + base64::Engine::decode(&base64::engine::general_purpose::STANDARD, log_id) + .or_else(|_| hex::decode(log_id)) + .map_err(|_| Error::InvalidKey(format!("invalid log ID encoding: {}", log_id)))?; + for tlog in &self.tlogs { - if &tlog.log_id.key_id == log_id { - return Ok(tlog.public_key.raw_bytes.clone()); + if let Some(tlog_log_id) = &tlog.log_id { + if tlog_log_id.key_id == log_id_bytes { + if let Some(public_key) = &tlog.public_key { + if let Some(raw_bytes) = &public_key.raw_bytes { + return Ok(raw_bytes.clone()); + } + } + } } } Err(Error::KeyNotFound(log_id.to_string())) } - /// Get all Certificate Transparency log public keys mapped by key ID - pub fn ctfe_keys(&self) -> Result> { + fn ctfe_keys(&self) -> Result>> { let mut keys = HashMap::new(); for ctlog in &self.ctlogs { - keys.insert( - 
ctlog.log_id.key_id.clone(), - ctlog.public_key.raw_bytes.clone(), - ); + if let (Some(log_id), Some(public_key)) = (&ctlog.log_id, &ctlog.public_key) { + let key_id_hex = hex::encode(&log_id.key_id); + if let Some(raw_bytes) = &public_key.raw_bytes { + keys.insert(key_id_hex, raw_bytes.clone()); + } + } } Ok(keys) } - /// Get all Certificate Transparency log public keys with their SHA-256 log IDs - /// Returns a list of (log_id, public_key) pairs where log_id is the SHA-256 hash - /// of the public key (used for matching against SCTs) - pub fn ctfe_keys_with_ids(&self) -> Result, DerPublicKey)>> { + fn ctfe_keys_with_ids(&self) -> Result, Vec)>> { let mut result = Vec::new(); for ctlog in &self.ctlogs { - let key_bytes = ctlog.public_key.raw_bytes.as_bytes(); - // Compute SHA-256 hash of the public key to get the log ID - let log_id = sigstore_crypto::sha256(key_bytes).as_bytes().to_vec(); - result.push((log_id, ctlog.public_key.raw_bytes.clone())); + if let Some(public_key) = &ctlog.public_key { + if let Some(key_bytes) = &public_key.raw_bytes { + // Compute SHA-256 hash of the public key to get the log ID + let log_id = sigstore_crypto::sha256(key_bytes).as_bytes().to_vec(); + result.push((log_id, key_bytes.clone())); + } + } } Ok(result) } - /// Get all TSA certificates with their validity periods - pub fn tsa_certs_with_validity(&self) -> Result> { + fn tsa_certs_with_validity(&self) -> Result> { let mut result = Vec::new(); for tsa in &self.timestamp_authorities { - for cert_entry in &tsa.cert_chain.certificates { - let cert_der = cert_entry.raw_bytes.as_bytes().to_vec(); - - // Parse validity period - let (start, end) = if let Some(valid_for) = &tsa.valid_for { - let start = valid_for - .start - .as_ref() - .and_then(|s| DateTime::parse_from_rfc3339(s).ok()) - .map(|dt| dt.with_timezone(&Utc)); - let end = valid_for - .end - .as_ref() - .and_then(|s| DateTime::parse_from_rfc3339(s).ok()) - .map(|dt| dt.with_timezone(&Utc)); - (start, end) - } else { - (None, None) - }; - - result.push((CertificateDer::from(&cert_der[..]).into_owned(), start, end)); + if let Some(cert_chain) = &tsa.cert_chain { + let (start, end) = time_range_to_datetimes(tsa.valid_for.as_ref()); + + for cert in &cert_chain.certificates { + let cert_der = CertificateDer::from(cert.raw_bytes.as_slice()).into_owned(); + result.push((cert_der, start, end)); + } } } Ok(result) } - /// Get TSA root certificates (for chain validation) - pub fn tsa_root_certs(&self) -> Result>> { + fn tsa_root_certs(&self) -> Result>> { let mut roots = Vec::new(); for tsa in &self.timestamp_authorities { - // The last certificate in the chain is typically the root - if let Some(cert_entry) = tsa.cert_chain.certificates.last() { - roots.push(CertificateDer::from(cert_entry.raw_bytes.as_bytes()).into_owned()); + if let Some(cert_chain) = &tsa.cert_chain { + // The last certificate in the chain is typically the root + if let Some(cert) = cert_chain.certificates.last() { + roots.push(CertificateDer::from(cert.raw_bytes.as_slice()).into_owned()); + } } } Ok(roots) } - /// Get TSA intermediate certificates (for chain validation) - pub fn tsa_intermediate_certs(&self) -> Result>> { + fn tsa_intermediate_certs(&self) -> Result>> { let mut intermediates = Vec::new(); for tsa in &self.timestamp_authorities { - // All certificates except the first (leaf) and last (root) are intermediates - let chain_len = tsa.cert_chain.certificates.len(); - if chain_len > 2 { - for cert_entry in &tsa.cert_chain.certificates[1..chain_len - 1] { - intermediates - 
.push(CertificateDer::from(cert_entry.raw_bytes.as_bytes()).into_owned()); + if let Some(cert_chain) = &tsa.cert_chain { + let chain_len = cert_chain.certificates.len(); + if chain_len > 2 { + for cert in &cert_chain.certificates[1..chain_len - 1] { + intermediates + .push(CertificateDer::from(cert.raw_bytes.as_slice()).into_owned()); + } } } } Ok(intermediates) } - /// Get TSA leaf certificates (the first certificate in each chain) - /// These are the actual TSA signing certificates - pub fn tsa_leaf_certs(&self) -> Result>> { + fn tsa_leaf_certs(&self) -> Result>> { let mut leaves = Vec::new(); for tsa in &self.timestamp_authorities { - // The first certificate in the chain is the leaf (TSA signing cert) - if let Some(cert_entry) = tsa.cert_chain.certificates.first() { - leaves.push(CertificateDer::from(cert_entry.raw_bytes.as_bytes()).into_owned()); + if let Some(cert_chain) = &tsa.cert_chain { + if let Some(cert) = cert_chain.certificates.first() { + leaves.push(CertificateDer::from(cert.raw_bytes.as_slice()).into_owned()); + } } } Ok(leaves) } - /// Check if a Rekor key ID exists in the trusted root - pub fn has_rekor_key(&self, key_id: &LogKeyId) -> bool { - self.tlogs.iter().any(|tlog| &tlog.log_id.key_id == key_id) - } - - /// Get the validity period for a TSA at a given time - pub fn tsa_validity_for_time( - &self, - timestamp: DateTime, - ) -> Result, DateTime)>> { - for tsa in &self.timestamp_authorities { - if let Some(valid_for) = &tsa.valid_for { - let start = valid_for - .start - .as_ref() - .and_then(|s| DateTime::parse_from_rfc3339(s).ok()) - .map(|dt| dt.with_timezone(&Utc)); - let end = valid_for - .end - .as_ref() - .and_then(|s| DateTime::parse_from_rfc3339(s).ok()) - .map(|dt| dt.with_timezone(&Utc)); - - // Check if timestamp falls within this TSA's validity - if let (Some(start_time), Some(end_time)) = (start, end) { - if timestamp >= start_time && timestamp <= end_time { - return Ok(Some((start_time, end_time))); - } - } else if let Some(start_time) = start { - // Only start time specified, check if after start - if timestamp >= start_time { - return Ok(start.zip(end)); - } - } - } - } - Ok(None) + fn has_rekor_key(&self, key_id: &str) -> bool { + // Try to decode as base64 first, then hex + let Ok(key_id_bytes) = + base64::Engine::decode(&base64::engine::general_purpose::STANDARD, key_id) + .or_else(|_| hex::decode(key_id)) + else { + return false; + }; + + self.tlogs.iter().any(|tlog| { + tlog.log_id + .as_ref() + .map(|id| id.key_id == key_id_bytes) + .unwrap_or(false) + }) } - /// Check if a timestamp is within any TSA's validity period from the trust root - /// - /// Returns true if: - /// - There are no timestamp authorities configured (no TSA verification) - /// - Any TSA has no `valid_for` field (open-ended validity) - /// - The timestamp falls within at least one TSA's `valid_for` period - /// - /// Returns false only if there are TSAs with validity constraints and the - /// timestamp doesn't fall within any of them. 
- pub fn is_timestamp_within_tsa_validity(&self, timestamp: DateTime) -> bool { + fn is_timestamp_within_tsa_validity(&self, timestamp: DateTime) -> bool { // If no TSAs are configured, no validity check needed if self.timestamp_authorities.is_empty() { return true; @@ -399,18 +303,8 @@ impl TrustedRoot { return true; }; - let start = valid_for - .start - .as_ref() - .and_then(|s| DateTime::parse_from_rfc3339(s).ok()) - .map(|dt| dt.with_timezone(&Utc)); - let end = valid_for - .end - .as_ref() - .and_then(|s| DateTime::parse_from_rfc3339(s).ok()) - .map(|dt| dt.with_timezone(&Utc)); + let (start, end) = time_range_to_datetimes(Some(valid_for)); - // Check if timestamp falls within this TSA's validity period let after_start = start.map_or(true, |s| timestamp >= s); let before_end = end.map_or(true, |e| timestamp <= e); @@ -424,76 +318,10 @@ impl TrustedRoot { } } -/// Embedded production trusted root from -/// This is the default trusted root for Sigstore's public production instance. -pub const SIGSTORE_PRODUCTION_TRUSTED_ROOT: &str = include_str!("trusted_root.json"); - -/// Embedded staging trusted root from -/// This is the trusted root for Sigstore's staging/testing instance. -pub const SIGSTORE_STAGING_TRUSTED_ROOT: &str = include_str!("trusted_root_staging.json"); - -impl TrustedRoot { - /// Load the default Sigstore production trusted root - pub fn production() -> Result { - Self::from_json(SIGSTORE_PRODUCTION_TRUSTED_ROOT) - } - - /// Load the Sigstore staging trusted root - /// - /// This is useful for testing against the Sigstore staging environment - /// at . - pub fn staging() -> Result { - Self::from_json(SIGSTORE_STAGING_TRUSTED_ROOT) - } -} - #[cfg(test)] mod tests { use super::*; - const SAMPLE_TRUSTED_ROOT: &str = r#"{ - "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1", - "tlogs": [{ - "baseUrl": "https://rekor.sigstore.dev", - "hashAlgorithm": "SHA2_256", - "publicKey": { - "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEYI4heOTrNrZO27elFE8ynfrdPMikttRkbe+vJKQ50G6bfwQ3WyhLpRwwwohelDAm8xRzJ56nYsIa3VHivVvpmA==", - "keyDetails": "PKIX_ECDSA_P256_SHA_256" - }, - "logId": { - "keyId": "test-key-id" - } - }], - "certificateAuthorities": [], - "ctlogs": [], - "timestampAuthorities": [] - }"#; - - #[test] - fn test_parse_trusted_root() { - let root = TrustedRoot::from_json(SAMPLE_TRUSTED_ROOT).unwrap(); - assert_eq!(root.tlogs.len(), 1); - assert_eq!( - root.tlogs[0].log_id.key_id, - LogKeyId::new("test-key-id".to_string()) - ); - } - - #[test] - fn test_rekor_keys() { - let root = TrustedRoot::from_json(SAMPLE_TRUSTED_ROOT).unwrap(); - let keys = root.rekor_keys().unwrap(); - assert_eq!(keys.len(), 1); - assert!(keys.contains_key("test-key-id")); - } - - #[test] - fn test_has_rekor_key() { - let root = TrustedRoot::from_json(SAMPLE_TRUSTED_ROOT).unwrap(); - assert!(root.has_rekor_key(&LogKeyId::new("test-key-id".to_string()))); - assert!(!root.has_rekor_key(&LogKeyId::new("non-existent".to_string()))); - } - #[test] fn test_production_trusted_root() { let root = TrustedRoot::production().unwrap(); @@ -508,7 +336,26 @@ mod tests { assert!(!root.tlogs.is_empty()); assert!(!root.certificate_authorities.is_empty()); assert!(!root.ctlogs.is_empty()); - // Staging should have different URLs from production - assert!(root.tlogs[0].base_url.contains("sigstage.dev")); + } + + #[test] + fn test_fulcio_certs() { + let root = TrustedRoot::production().unwrap(); + let certs = root.fulcio_certs().unwrap(); + assert!(!certs.is_empty()); + } + + #[test] + fn 
test_rekor_keys() { + let root = TrustedRoot::production().unwrap(); + let keys = root.rekor_keys().unwrap(); + assert!(!keys.is_empty()); + } + + #[test] + fn test_ctfe_keys() { + let root = TrustedRoot::production().unwrap(); + let keys = root.ctfe_keys().unwrap(); + assert!(!keys.is_empty()); } } diff --git a/crates/sigstore-trust-root/src/tuf.rs b/crates/sigstore-trust-root/src/tuf.rs index 785bfdc..772555d 100644 --- a/crates/sigstore-trust-root/src/tuf.rs +++ b/crates/sigstore-trust-root/src/tuf.rs @@ -6,7 +6,7 @@ //! # Example //! //! ```no_run -//! use sigstore_trust_root::{TrustedRoot, SigningConfig}; +//! use sigstore_trust_root::{TrustedRoot, TrustedRootExt, SigningConfig, SigningConfigExt, TufTrustedRootExt, TufSigningConfigExt}; //! //! # async fn example() -> Result<(), sigstore_trust_root::Error> { //! // Fetch trusted root via TUF from production Sigstore @@ -27,7 +27,7 @@ use std::path::PathBuf; use tough::{HttpTransport, IntoVec, RepositoryLoader, TargetName}; use url::Url; -use crate::{Error, Result, SigningConfig, TrustedRoot}; +use crate::{Error, Result, SigningConfig, SigningConfigExt, TrustedRoot, TrustedRootExt}; /// Default Sigstore production TUF repository URL pub const DEFAULT_TUF_URL: &str = "https://tuf-repo-cdn.sigstore.dev"; @@ -268,7 +268,22 @@ impl TufClient { } } -impl TrustedRoot { +/// Extension trait for TrustedRoot with TUF fetching methods +pub trait TufTrustedRootExt { + /// Fetch the trusted root from Sigstore's production TUF repository + fn from_tuf() -> impl std::future::Future> + Send; + + /// Fetch the trusted root from Sigstore's staging TUF repository + fn from_tuf_staging() -> impl std::future::Future> + Send; + + /// Fetch the trusted root from a custom TUF repository + fn from_tuf_with_config( + config: TufConfig, + tuf_root: &'static [u8], + ) -> impl std::future::Future> + Send; +} + +impl TufTrustedRootExt for TrustedRoot { /// Fetch the trusted root from Sigstore's production TUF repository /// /// This securely fetches the `trusted_root.json` using the TUF protocol, @@ -277,7 +292,7 @@ impl TrustedRoot { /// # Example /// /// ```no_run - /// use sigstore_trust_root::TrustedRoot; + /// use sigstore_trust_root::{TrustedRoot, TufTrustedRootExt}; /// /// # async fn example() -> Result<(), sigstore_trust_root::Error> { /// let root = TrustedRoot::from_tuf().await?; @@ -285,23 +300,23 @@ impl TrustedRoot { /// # Ok(()) /// # } /// ``` - pub async fn from_tuf() -> Result { + async fn from_tuf() -> Result { let client = TufClient::production(); let bytes = client.fetch_target(TRUSTED_ROOT_TARGET).await?; let json = String::from_utf8(bytes) .map_err(|e| Error::Tuf(format!("Invalid UTF-8 in {}: {}", TRUSTED_ROOT_TARGET, e)))?; - Self::from_json(&json) + TrustedRoot::from_json(&json) } /// Fetch the trusted root from Sigstore's staging TUF repository /// /// This is useful for testing against the staging Sigstore infrastructure. 
- pub async fn from_tuf_staging() -> Result { + async fn from_tuf_staging() -> Result { let client = TufClient::staging(); let bytes = client.fetch_target(TRUSTED_ROOT_TARGET).await?; let json = String::from_utf8(bytes) .map_err(|e| Error::Tuf(format!("Invalid UTF-8 in {}: {}", TRUSTED_ROOT_TARGET, e)))?; - Self::from_json(&json) + TrustedRoot::from_json(&json) } /// Fetch the trusted root from a custom TUF repository @@ -310,16 +325,34 @@ impl TrustedRoot { /// /// * `config` - TUF client configuration /// * `tuf_root` - The TUF root.json to use for bootstrapping trust - pub async fn from_tuf_with_config(config: TufConfig, tuf_root: &'static [u8]) -> Result { + async fn from_tuf_with_config( + config: TufConfig, + tuf_root: &'static [u8], + ) -> Result { let client = TufClient::new(config, tuf_root); let bytes = client.fetch_target(TRUSTED_ROOT_TARGET).await?; let json = String::from_utf8(bytes) .map_err(|e| Error::Tuf(format!("Invalid UTF-8 in {}: {}", TRUSTED_ROOT_TARGET, e)))?; - Self::from_json(&json) + TrustedRoot::from_json(&json) } } -impl SigningConfig { +/// Extension trait for SigningConfig with TUF fetching methods +pub trait TufSigningConfigExt { + /// Fetch the signing config from Sigstore's production TUF repository + fn from_tuf() -> impl std::future::Future> + Send; + + /// Fetch the signing config from Sigstore's staging TUF repository + fn from_tuf_staging() -> impl std::future::Future> + Send; + + /// Fetch the signing config from a custom TUF repository + fn from_tuf_with_config( + config: TufConfig, + tuf_root: &'static [u8], + ) -> impl std::future::Future> + Send; +} + +impl TufSigningConfigExt for SigningConfig { /// Fetch the signing configuration from Sigstore's production TUF repository /// /// This securely fetches the `signing_config.v0.2.json` using the TUF protocol, @@ -334,7 +367,7 @@ impl SigningConfig { /// # Example /// /// ```no_run - /// use sigstore_trust_root::SigningConfig; + /// use sigstore_trust_root::{SigningConfig, SigningConfigExt, TufSigningConfigExt}; /// /// # async fn example() -> Result<(), sigstore_trust_root::Error> { /// let config = SigningConfig::from_tuf().await?; @@ -344,26 +377,26 @@ impl SigningConfig { /// # Ok(()) /// # } /// ``` - pub async fn from_tuf() -> Result { + async fn from_tuf() -> Result { let client = TufClient::production(); let bytes = client.fetch_target(SIGNING_CONFIG_TARGET).await?; let json = String::from_utf8(bytes).map_err(|e| { Error::Tuf(format!("Invalid UTF-8 in {}: {}", SIGNING_CONFIG_TARGET, e)) })?; - Self::from_json(&json) + SigningConfig::from_json(&json) } /// Fetch the signing configuration from Sigstore's staging TUF repository /// /// This is useful for testing against the staging Sigstore infrastructure, /// which may have newer API versions (e.g., Rekor V2) available. 
- pub async fn from_tuf_staging() -> Result { + async fn from_tuf_staging() -> Result { let client = TufClient::staging(); let bytes = client.fetch_target(SIGNING_CONFIG_TARGET).await?; let json = String::from_utf8(bytes).map_err(|e| { Error::Tuf(format!("Invalid UTF-8 in {}: {}", SIGNING_CONFIG_TARGET, e)) })?; - Self::from_json(&json) + SigningConfig::from_json(&json) } /// Fetch the signing configuration from a custom TUF repository @@ -372,13 +405,16 @@ impl SigningConfig { /// /// * `config` - TUF client configuration /// * `tuf_root` - The TUF root.json to use for bootstrapping trust - pub async fn from_tuf_with_config(config: TufConfig, tuf_root: &'static [u8]) -> Result { + async fn from_tuf_with_config( + config: TufConfig, + tuf_root: &'static [u8], + ) -> Result { let client = TufClient::new(config, tuf_root); let bytes = client.fetch_target(SIGNING_CONFIG_TARGET).await?; let json = String::from_utf8(bytes).map_err(|e| { Error::Tuf(format!("Invalid UTF-8 in {}: {}", SIGNING_CONFIG_TARGET, e)) })?; - Self::from_json(&json) + SigningConfig::from_json(&json) } } diff --git a/crates/sigstore-trust-root/tests/parsing.rs b/crates/sigstore-trust-root/tests/parsing.rs index 92a40a5..0550c09 100644 --- a/crates/sigstore-trust-root/tests/parsing.rs +++ b/crates/sigstore-trust-root/tests/parsing.rs @@ -1,4 +1,4 @@ -use sigstore_trust_root::TrustedRoot; +use sigstore_trust_root::{TrustedRoot, TrustedRootExt}; use std::path::PathBuf; #[test] diff --git a/crates/sigstore-types/Cargo.toml b/crates/sigstore-types/Cargo.toml index 9d3b935..b52c30c 100644 --- a/crates/sigstore-types/Cargo.toml +++ b/crates/sigstore-types/Cargo.toml @@ -16,5 +16,8 @@ thiserror = { workspace = true } chrono = { workspace = true } pem = { workspace = true } +# Sigstore protobuf specs +sigstore_protobuf_specs = { workspace = true } + [dev-dependencies] hex = { workspace = true } diff --git a/crates/sigstore-types/src/bundle.rs b/crates/sigstore-types/src/bundle.rs index f9694b2..e45036d 100644 --- a/crates/sigstore-types/src/bundle.rs +++ b/crates/sigstore-types/src/bundle.rs @@ -3,27 +3,36 @@ //! The bundle is the core artifact produced by signing and consumed by verification. //! It contains the signature, verification material (certificate or public key), //! and transparency log entries. +//! +//! This module re-exports the official Sigstore protobuf types and provides +//! extension traits for common operations. 
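In practice the accessors below replace direct field access on the old hand-rolled structs. A minimal sketch of the `BundleExt` surface introduced in this file (method names taken from the trait below; the error type is boxed here only to keep the sketch self-contained):

use sigstore_types::{Bundle, BundleExt};

fn inspect_bundle(json: &str) -> Result<(), Box<dyn std::error::Error>> {
    let bundle = Bundle::from_json(json)?;

    println!("media type: {}", bundle.media_type);
    println!("tlog entries: {}", bundle.tlog_entries().len());
    println!("inclusion proof: {}", bundle.has_inclusion_proof());
    println!("inclusion promise: {}", bundle.has_inclusion_promise());

    if let Some(envelope) = bundle.dsse_envelope() {
        println!("DSSE payload type: {}", envelope.payload_type);
    } else if bundle.is_message_signature() {
        println!("bundle carries a raw message signature");
    }
    Ok(())
}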
use crate::checkpoint::Checkpoint; -use crate::dsse::DsseEnvelope; -use crate::encoding::{ - CanonicalizedBody, DerCertificate, LogIndex, LogKeyId, Sha256Hash, SignatureBytes, - SignedTimestamp, TimestampToken, -}; +use crate::encoding::DerCertificate; use crate::error::{Error, Result}; -use crate::hash::HashAlgorithm; -use serde::{Deserialize, Deserializer, Serialize}; use std::str::FromStr; -/// Deserialize a field that may be null as the default value -fn deserialize_null_as_default<'de, D, T>(deserializer: D) -> std::result::Result -where - D: Deserializer<'de>, - T: Default + Deserialize<'de>, -{ - let opt = Option::deserialize(deserializer)?; - Ok(opt.unwrap_or_default()) -} +// Re-export protobuf types +pub use sigstore_protobuf_specs::dev::sigstore::{ + bundle::v1::{ + bundle::Content as BundleContent, + verification_material::Content as VerificationMaterialContent, Bundle, + TimestampVerificationData, VerificationMaterial, + }, + common::v1::{ + HashAlgorithm as ProtoHashAlgorithm, HashOutput, LogId, MessageSignature, + PublicKeyIdentifier, Rfc3161SignedTimestamp, X509Certificate, X509CertificateChain, + }, + rekor::v1::{ + Checkpoint as ProtoCheckpoint, InclusionPromise, InclusionProof, KindVersion, + TransparencyLogEntry, + }, +}; + +// Re-export DSSE envelope from intoto +pub use sigstore_protobuf_specs::io::intoto::{ + Envelope as DsseEnvelope, Signature as DsseSignature, +}; /// Sigstore bundle media types #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -62,316 +71,216 @@ impl FromStr for MediaType { } } -/// Bundle version enum for serde -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] -pub enum BundleVersion { - /// Version 0.1 - #[serde(rename = "0.1")] - V0_1, - /// Version 0.2 - #[serde(rename = "0.2")] - V0_2, - /// Version 0.3 - #[serde(rename = "0.3")] - V0_3, -} +/// Extension trait for Bundle with helper methods +pub trait BundleExt { + /// Parse a bundle from JSON + fn from_json(json: &str) -> Result; + + /// Serialize the bundle to JSON + fn to_json(&self) -> Result; + + /// Serialize the bundle to pretty-printed JSON + fn to_json_pretty(&self) -> Result; + + /// Get the bundle version from the media type + fn version(&self) -> Result; + + /// Get the signing certificate if present (DER-encoded) + fn signing_certificate(&self) -> Option; + + /// Check if the bundle has an inclusion proof + fn has_inclusion_proof(&self) -> bool; -/// The main Sigstore bundle structure -#[derive(Debug, Clone, PartialEq, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct Bundle { - /// Media type identifying the bundle version - pub media_type: String, - /// Verification material (certificate chain or public key) - pub verification_material: VerificationMaterial, - /// The content being signed (message signature or DSSE envelope) - #[serde(flatten)] - pub content: SignatureContent, + /// Check if the bundle has an inclusion promise (SET) + fn has_inclusion_promise(&self) -> bool; + + /// Get the transparency log entries + fn tlog_entries(&self) -> &[TransparencyLogEntry]; + + /// Check if bundle contains a message signature + fn is_message_signature(&self) -> bool; + + /// Check if bundle contains a DSSE envelope + fn is_dsse_envelope(&self) -> bool; + + /// Get the message signature if present + fn message_signature(&self) -> Option<&MessageSignature>; + + /// Get the DSSE envelope if present + fn dsse_envelope(&self) -> Option<&DsseEnvelope>; } -impl Bundle { - /// Parse a bundle from JSON, preserving raw DSSE envelope for hash verification - pub fn 
from_json(json: &str) -> Result { +impl BundleExt for Bundle { + fn from_json(json: &str) -> Result { serde_json::from_str(json).map_err(Error::Json) } - /// Serialize the bundle to JSON - pub fn to_json(&self) -> Result { + fn to_json(&self) -> Result { serde_json::to_string(self).map_err(Error::Json) } - /// Serialize the bundle to pretty-printed JSON - pub fn to_json_pretty(&self) -> Result { + fn to_json_pretty(&self) -> Result { serde_json::to_string_pretty(self).map_err(Error::Json) } - /// Get the bundle version from the media type - pub fn version(&self) -> Result { + fn version(&self) -> Result { MediaType::from_str(&self.media_type) } - /// Get the signing certificate if present - pub fn signing_certificate(&self) -> Option<&DerCertificate> { - match &self.verification_material.content { - VerificationMaterialContent::Certificate(cert) => Some(&cert.raw_bytes), - VerificationMaterialContent::X509CertificateChain { certificates } => { - certificates.first().map(|c| &c.raw_bytes) + fn signing_certificate(&self) -> Option { + let vm = self.verification_material.as_ref()?; + match &vm.content { + Some(VerificationMaterialContent::Certificate(cert)) => { + Some(DerCertificate::from_bytes(&cert.raw_bytes)) } - VerificationMaterialContent::PublicKey { .. } => None, + Some(VerificationMaterialContent::X509CertificateChain(chain)) => chain + .certificates + .first() + .map(|c| DerCertificate::from_bytes(&c.raw_bytes)), + Some(VerificationMaterialContent::PublicKey(_)) => None, + None => None, } } - /// Check if the bundle has an inclusion proof - pub fn has_inclusion_proof(&self) -> bool { - self.verification_material - .tlog_entries + fn has_inclusion_proof(&self) -> bool { + self.tlog_entries() .iter() .any(|e| e.inclusion_proof.is_some()) } - /// Check if the bundle has an inclusion promise (SET) - pub fn has_inclusion_promise(&self) -> bool { - self.verification_material - .tlog_entries + fn has_inclusion_promise(&self) -> bool { + self.tlog_entries() .iter() .any(|e| e.inclusion_promise.is_some()) } -} -/// The signature content (either a message signature or DSSE envelope) -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum SignatureContent { - /// A simple message signature - MessageSignature(MessageSignature), - /// A DSSE envelope - DsseEnvelope(DsseEnvelope), -} + fn tlog_entries(&self) -> &[TransparencyLogEntry] { + self.verification_material + .as_ref() + .map(|vm| vm.tlog_entries.as_slice()) + .unwrap_or(&[]) + } -/// A simple message signature -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct MessageSignature { - /// Message digest (optional, for detached signatures) - #[serde(skip_serializing_if = "Option::is_none")] - pub message_digest: Option, - /// The signature bytes - pub signature: SignatureBytes, -} + fn is_message_signature(&self) -> bool { + matches!(self.content, Some(BundleContent::MessageSignature(_))) + } -/// Message digest with algorithm -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct MessageDigest { - /// Hash algorithm - pub algorithm: HashAlgorithm, - /// Digest bytes - pub digest: Sha256Hash, -} + fn is_dsse_envelope(&self) -> bool { + matches!(self.content, Some(BundleContent::DsseEnvelope(_))) + } -/// Verification material containing certificate/key and log entries -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct 
VerificationMaterial { - /// Certificate, certificate chain, or public key - #[serde(flatten)] - pub content: VerificationMaterialContent, - /// Transparency log entries - #[serde(default)] - pub tlog_entries: Vec, - /// RFC 3161 timestamp verification data - #[serde(default, deserialize_with = "deserialize_null_as_default")] - pub timestamp_verification_data: TimestampVerificationData, -} + fn message_signature(&self) -> Option<&MessageSignature> { + match &self.content { + Some(BundleContent::MessageSignature(sig)) => Some(sig), + _ => None, + } + } -/// The verification material content type -/// -/// The field name in JSON determines which variant is used: -/// - "certificate" -> Certificate variant (v0.3 format) -/// - "x509CertificateChain" -> X509CertificateChain variant (v0.1/v0.2 format) -/// - "publicKey" -> PublicKey variant -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum VerificationMaterialContent { - /// Single certificate (v0.3 format) - Certificate(CertificateContent), - /// Certificate chain (v0.1/v0.2 format) - X509CertificateChain { - /// Chain of certificates - certificates: Vec, - }, - /// Public key (keyless alternative) - PublicKey { - /// Public key hint - hint: String, - }, + fn dsse_envelope(&self) -> Option<&DsseEnvelope> { + match &self.content { + Some(BundleContent::DsseEnvelope(env)) => Some(env), + _ => None, + } + } } -/// Certificate content for v0.3 bundles -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CertificateContent { - /// DER-encoded certificate - pub raw_bytes: DerCertificate, -} +/// Extension trait for TransparencyLogEntry +pub trait TransparencyLogEntryExt { + /// Get the log index as u64 + fn log_index_u64(&self) -> u64; -/// X.509 certificate in the chain -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct X509Certificate { - /// DER-encoded certificate - pub raw_bytes: DerCertificate, -} + /// Get the log key ID as base64 string + fn log_key_id(&self) -> Option; -/// A transparency log entry -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct TransparencyLogEntry { - /// Log index - pub log_index: LogIndex, - /// Log ID - pub log_id: LogId, - /// Kind and version of the entry - pub kind_version: KindVersion, - /// Integrated time (Unix timestamp) - #[serde(default, skip_serializing_if = "String::is_empty")] - pub integrated_time: String, - /// Inclusion promise (Signed Entry Timestamp) - #[serde(skip_serializing_if = "Option::is_none")] - pub inclusion_promise: Option, - /// Inclusion proof - #[serde(skip_serializing_if = "Option::is_none")] - pub inclusion_proof: Option, - /// Canonicalized body - pub canonicalized_body: CanonicalizedBody, + /// Get the integrated time as Unix timestamp + fn integrated_time_secs(&self) -> i64; } -/// Log identifier -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct LogId { - /// Key ID (base64 encoded SHA-256 of public key) - pub key_id: LogKeyId, -} +impl TransparencyLogEntryExt for TransparencyLogEntry { + fn log_index_u64(&self) -> u64 { + self.log_index as u64 + } -/// Entry kind and version -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct KindVersion { - /// Entry kind (e.g., "hashedrekord") - pub kind: String, - /// Entry version (e.g., 
"0.0.1") - pub version: String, -} + fn log_key_id(&self) -> Option { + use base64::Engine; + self.log_id + .as_ref() + .map(|id| base64::engine::general_purpose::STANDARD.encode(&id.key_id)) + } -/// Inclusion promise (Signed Entry Timestamp) -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct InclusionPromise { - /// Signed entry timestamp - pub signed_entry_timestamp: SignedTimestamp, + fn integrated_time_secs(&self) -> i64 { + self.integrated_time + } } -/// Inclusion proof in the Merkle tree -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct InclusionProof { - /// Index of the entry in the log - pub log_index: LogIndex, - /// Root hash of the tree - pub root_hash: Sha256Hash, - /// Tree size at time of proof - pub tree_size: String, - /// Hashes in the inclusion proof path - #[serde(with = "sha256_hash_vec")] - pub hashes: Vec, - /// Checkpoint (signed tree head) - optional - #[serde(default, skip_serializing_if = "CheckpointData::is_empty")] - pub checkpoint: CheckpointData, -} +/// Extension trait for InclusionProof +pub trait InclusionProofExt { + /// Get the log index as u64 + fn log_index_u64(&self) -> u64; -/// Serde helper for `Vec` -mod sha256_hash_vec { - use super::Sha256Hash; - use serde::{Deserialize, Deserializer, Serialize, Serializer}; - - pub fn serialize(hashes: &[Sha256Hash], serializer: S) -> Result - where - S: Serializer, - { - // Sha256Hash already implements Serialize (as base64) - hashes.serialize(serializer) - } + /// Get the tree size as u64 + fn tree_size_u64(&self) -> u64; - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - { - Vec::::deserialize(deserializer) - } + /// Parse the checkpoint text + fn parse_checkpoint(&self) -> Result; } -/// Checkpoint data in inclusion proof -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] -#[serde(rename_all = "camelCase")] -pub struct CheckpointData { - /// Text representation of the checkpoint - #[serde(default)] - pub envelope: String, -} +impl InclusionProofExt for InclusionProof { + fn log_index_u64(&self) -> u64 { + self.log_index as u64 + } -impl CheckpointData { - /// Parse the checkpoint text - pub fn parse(&self) -> Result { - Checkpoint::from_text(&self.envelope) + fn tree_size_u64(&self) -> u64 { + self.tree_size as u64 } - /// Check if checkpoint data is empty - pub fn is_empty(&self) -> bool { - self.envelope.is_empty() + fn parse_checkpoint(&self) -> Result { + let checkpoint = self + .checkpoint + .as_ref() + .ok_or_else(|| Error::MissingField("checkpoint".to_string()))?; + Checkpoint::from_text(&checkpoint.envelope) } } -/// RFC 3161 timestamp verification data -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)] -#[serde(rename_all = "camelCase")] -pub struct TimestampVerificationData { - /// RFC 3161 signed timestamps - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub rfc3161_timestamps: Vec, +/// Extension trait for DsseEnvelope +pub trait DsseEnvelopeExt { + /// Get the Pre-Authentication Encoding (PAE) bytes + /// + /// PAE is the string that gets signed in DSSE: + /// `DSSEv1 ` + fn pae(&self) -> Vec; } -/// An RFC 3161 timestamp -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct Rfc3161Timestamp { - /// Signed timestamp data (DER-encoded) - pub signed_timestamp: TimestampToken, +impl DsseEnvelopeExt for DsseEnvelope { 
+ fn pae(&self) -> Vec { + pae(&self.payload_type, &self.payload) + } } -// Custom Deserialize implementation for Bundle -impl<'de> Deserialize<'de> for Bundle { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - #[derive(Deserialize)] - #[serde(rename_all = "camelCase")] - struct BundleHelper { - media_type: String, - verification_material: VerificationMaterial, - #[serde(flatten)] - content: SignatureContent, - } +/// Compute the Pre-Authentication Encoding (PAE) +/// +/// Format: `DSSEv1 ` +pub fn pae(payload_type: &str, payload: &[u8]) -> Vec { + let mut result = Vec::new(); - let helper = BundleHelper::deserialize(deserializer)?; + // "DSSEv1" + space + result.extend_from_slice(b"DSSEv1 "); - Ok(Bundle { - media_type: helper.media_type, - verification_material: helper.verification_material, - content: helper.content, - }) - } + // payload_type length + space + result.extend_from_slice(format!("{} ", payload_type.len()).as_bytes()); + + // payload_type + space + result.extend_from_slice(payload_type.as_bytes()); + result.push(b' '); + + // payload length + space + result.extend_from_slice(format!("{} ", payload.len()).as_bytes()); + + // payload + result.extend_from_slice(payload); + + result } #[cfg(test)] @@ -398,4 +307,12 @@ mod tests { fn test_media_type_invalid() { assert!(MediaType::from_str("invalid").is_err()); } + + #[test] + fn test_pae() { + // Test vector from DSSE spec + let pae_result = pae("application/example", b"hello world"); + let expected = b"DSSEv1 19 application/example 11 hello world"; + assert_eq!(pae_result, expected); + } } diff --git a/crates/sigstore-types/src/dsse.rs b/crates/sigstore-types/src/dsse.rs deleted file mode 100644 index 37d70c5..0000000 --- a/crates/sigstore-types/src/dsse.rs +++ /dev/null @@ -1,141 +0,0 @@ -//! Dead Simple Signing Envelope (DSSE) types -//! -//! DSSE is a signature envelope format used for signing arbitrary payloads. -//! 
Specification: - -use crate::encoding::{KeyId, PayloadBytes, SignatureBytes}; -use serde::{Deserialize, Serialize}; - -/// A DSSE envelope containing a signed payload -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct DsseEnvelope { - /// Type URI of the payload - pub payload_type: String, - /// Payload bytes - pub payload: PayloadBytes, - /// Signatures over the PAE (Pre-Authentication Encoding) - pub signatures: Vec, -} - -/// A signature in a DSSE envelope -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct DsseSignature { - /// Signature bytes - pub sig: SignatureBytes, - /// Key ID (optional hint for key lookup) - #[serde(default, skip_serializing_if = "KeyId::is_empty")] - pub keyid: KeyId, -} - -impl DsseEnvelope { - /// Create a new DSSE envelope - pub fn new( - payload_type: String, - payload: PayloadBytes, - signatures: Vec, - ) -> Self { - Self { - payload_type, - payload, - signatures, - } - } - - /// Get the Pre-Authentication Encoding (PAE) string - /// - /// PAE is the string that gets signed in DSSE: - /// `DSSEv1 ` - pub fn pae(&self) -> Vec { - pae(&self.payload_type, self.payload.as_bytes()) - } - - /// Decode the payload bytes - pub fn decode_payload(&self) -> Vec { - self.payload.as_bytes().to_vec() - } -} - -/// Compute the Pre-Authentication Encoding (PAE) -/// -/// Format: `DSSEv1 ` -pub fn pae(payload_type: &str, payload: &[u8]) -> Vec { - let mut result = Vec::new(); - - // "DSSEv1" + space - result.extend_from_slice(b"DSSEv1 "); - - // payload_type length + space - result.extend_from_slice(format!("{} ", payload_type.len()).as_bytes()); - - // payload_type + space - result.extend_from_slice(payload_type.as_bytes()); - result.push(b' '); - - // payload length + space - result.extend_from_slice(format!("{} ", payload.len()).as_bytes()); - - // payload - result.extend_from_slice(payload); - - result -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_pae() { - // Test vector from DSSE spec - let pae_result = pae("application/example", b"hello world"); - let expected = b"DSSEv1 19 application/example 11 hello world"; - assert_eq!(pae_result, expected); - } - - #[test] - fn test_dsse_envelope_serde() { - let envelope = DsseEnvelope { - payload_type: "application/vnd.in-toto+json".to_string(), - payload: PayloadBytes::from_bytes(b"{\"_type\":\"https://in-toto.io/Statement/v1\"}"), - signatures: vec![DsseSignature { - sig: SignatureBytes::from_bytes(b"\x30\x44\x02\x20"), - keyid: KeyId::default(), - }], - }; - - let json = serde_json::to_string(&envelope).unwrap(); - let parsed: DsseEnvelope = serde_json::from_str(&json).unwrap(); - assert_eq!(envelope, parsed); - } - - #[test] - fn test_dsse_envelope_keyid_handling() { - // Test that empty keyid is omitted (matches GitHub/cosign behavior) - let json_with_empty_keyid = r#"{"payloadType":"application/vnd.in-toto+json","payload":"dGVzdA==","signatures":[{"sig":"c2ln","keyid":""}]}"#; - - let envelope: DsseEnvelope = serde_json::from_str(json_with_empty_keyid).unwrap(); - assert_eq!(envelope.signatures[0].keyid, KeyId::default()); - - let reserialized = serde_json::to_string(&envelope).unwrap(); - assert!( - !reserialized.contains("keyid"), - "Empty keyid should be omitted from output" - ); - - // Test that missing keyid deserializes to default - let json_without_keyid = r#"{"payloadType":"application/vnd.in-toto+json","payload":"dGVzdA==","signatures":[{"sig":"c2ln"}]}"#; - let 
envelope_no_keyid: DsseEnvelope = serde_json::from_str(json_without_keyid).unwrap(); - assert_eq!(envelope_no_keyid.signatures[0].keyid, KeyId::default()); - - // Test with non-empty keyid - should be preserved - let json_with_keyid = r#"{"payloadType":"application/vnd.in-toto+json","payload":"dGVzdA==","signatures":[{"sig":"c2ln","keyid":"test-key"}]}"#; - let envelope_with_keyid: DsseEnvelope = serde_json::from_str(json_with_keyid).unwrap(); - let json_out = serde_json::to_string(&envelope_with_keyid).unwrap(); - assert!( - json_out.contains(r#""keyid":"test-key""#), - "Non-empty keyid should be included in output" - ); - } -} diff --git a/crates/sigstore-types/src/lib.rs b/crates/sigstore-types/src/lib.rs index 4a12a0c..5e6a149 100644 --- a/crates/sigstore-types/src/lib.rs +++ b/crates/sigstore-types/src/lib.rs @@ -2,24 +2,33 @@ //! //! This crate provides the fundamental data structures used throughout the Sigstore //! ecosystem, including bundle formats, transparency log entries, and trust roots. +//! +//! Bundle types are re-exported from the official Sigstore protobuf specs, with +//! extension traits providing convenient methods. pub mod artifact; pub mod bundle; pub mod checkpoint; -pub mod dsse; pub mod encoding; pub mod error; pub mod hash; pub mod intoto; +// Note: dsse module removed - DSSE types now come from protobuf specs via bundle module + pub use artifact::Artifact; + +// Re-export protobuf bundle types and extension traits pub use bundle::{ - Bundle, BundleVersion, CheckpointData, InclusionPromise, InclusionProof, KindVersion, LogId, - MediaType, MessageDigest, MessageSignature, SignatureContent, TransparencyLogEntry, - VerificationMaterial, + pae, Bundle, BundleContent, BundleExt, DsseEnvelope, DsseEnvelopeExt, DsseSignature, + HashOutput, InclusionPromise, InclusionProof, InclusionProofExt, KindVersion, LogId, MediaType, + MessageSignature, ProtoCheckpoint, ProtoHashAlgorithm, PublicKeyIdentifier, + Rfc3161SignedTimestamp, TimestampVerificationData, TransparencyLogEntry, + TransparencyLogEntryExt, VerificationMaterial, VerificationMaterialContent, X509Certificate, + X509CertificateChain, }; + pub use checkpoint::{Checkpoint, CheckpointSignature}; -pub use dsse::{pae, DsseEnvelope, DsseSignature}; pub use encoding::{ base64_bytes, base64_bytes_option, hex_bytes, CanonicalizedBody, DerCertificate, DerPublicKey, EntryUuid, HexHash, HexLogId, KeyHint, KeyId, LogIndex, LogKeyId, PayloadBytes, PemContent, diff --git a/crates/sigstore-verify/examples/verify_bundle.rs b/crates/sigstore-verify/examples/verify_bundle.rs index b24df58..7e02cd6 100644 --- a/crates/sigstore-verify/examples/verify_bundle.rs +++ b/crates/sigstore-verify/examples/verify_bundle.rs @@ -28,8 +28,8 @@ //! gh attestation verify --owner //! ``` -use sigstore_trust_root::TrustedRoot; -use sigstore_types::Bundle; +use sigstore_trust_root::{TrustedRoot, TrustedRootExt}; +use sigstore_types::{Bundle, BundleExt}; use sigstore_verify::{verify, VerificationPolicy}; use std::env; diff --git a/crates/sigstore-verify/src/lib.rs b/crates/sigstore-verify/src/lib.rs index 79471e0..35326c5 100644 --- a/crates/sigstore-verify/src/lib.rs +++ b/crates/sigstore-verify/src/lib.rs @@ -6,8 +6,8 @@ //! //! ```no_run //! use sigstore_verify::{verify, VerificationPolicy}; -//! use sigstore_trust_root::TrustedRoot; -//! use sigstore_types::Bundle; +//! use sigstore_trust_root::{TrustedRoot, TrustedRootExt}; +//! use sigstore_types::{Bundle, BundleExt}; //! //! # fn example() -> Result<(), Box> { //! 
let trusted_root = TrustedRoot::production()?; diff --git a/crates/sigstore-verify/src/verify.rs b/crates/sigstore-verify/src/verify.rs index 4a74572..0f81d66 100644 --- a/crates/sigstore-verify/src/verify.rs +++ b/crates/sigstore-verify/src/verify.rs @@ -8,7 +8,7 @@ use sigstore_bundle::ValidationOptions; use sigstore_crypto::parse_certificate_info; use sigstore_trust_root::TrustedRoot; -use sigstore_types::{Artifact, Bundle, Sha256Hash, SignatureContent, Statement}; +use sigstore_types::{Artifact, Bundle, BundleContent, DsseEnvelopeExt, Sha256Hash, Statement}; /// Default clock skew tolerance in seconds (60 seconds = 1 minute) pub const DEFAULT_CLOCK_SKEW_SECONDS: i64 = 60; @@ -172,8 +172,8 @@ impl Verifier { /// /// ```no_run /// use sigstore_verify::{Verifier, VerificationPolicy}; - /// use sigstore_trust_root::TrustedRoot; - /// use sigstore_types::{Artifact, Bundle, Sha256Hash}; + /// use sigstore_trust_root::{TrustedRoot, TrustedRootExt}; + /// use sigstore_types::{Artifact, Bundle, BundleExt, Sha256Hash}; /// /// # fn example() -> Result<(), Box> { /// let trusted_root = TrustedRoot::production()?; @@ -226,10 +226,17 @@ impl Verifier { validate_bundle_with_options(bundle, &options) .map_err(|e| Error::Verification(format!("bundle validation failed: {}", e)))?; + // Get verification material + let vm = bundle.verification_material.as_ref().ok_or_else(|| { + Error::Verification("bundle missing verification material".to_string()) + })?; + + let vm_content = vm.content.as_ref().ok_or_else(|| { + Error::Verification("bundle missing verification material content".to_string()) + })?; + // Extract certificate for verification - let cert = crate::verify_impl::helpers::extract_certificate( - &bundle.verification_material.content, - )?; + let cert = crate::verify_impl::helpers::extract_certificate(vm_content)?; let cert_info = parse_certificate_info(cert.as_bytes()) .map_err(|e| Error::Verification(format!("failed to parse certificate: {}", e)))?; @@ -253,7 +260,7 @@ impl Verifier { // is valid at the time of signing, and has CODE_SIGNING EKU. if policy.verify_certificate { crate::verify_impl::helpers::verify_certificate_chain( - &bundle.verification_material.content, + vm_content, validation_time, &self.trusted_root, )?; @@ -262,10 +269,7 @@ impl Verifier { crate::verify_impl::helpers::validate_certificate_time(validation_time, &cert_info)?; // (2): Verify the signing certificate's SCT. - crate::verify_impl::helpers::verify_sct( - &bundle.verification_material.content, - &self.trusted_root, - )?; + crate::verify_impl::helpers::verify_sct(vm_content, &self.trusted_root)?; } // (3): Verify against the given `VerificationPolicy`. @@ -314,19 +318,18 @@ impl Verifier { // (7): Verify the signature and input against the signing certificate's // public key. 
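        // A quick worked example of the PAE framing relied on in step (7) when the
        // content is a DSSE envelope (a sketch, using the `pae` helper this change
        // re-exports from `sigstore_types`): the signature covers the framed bytes,
        // never the raw payload.
        let framed = sigstore_types::pae("application/vnd.in-toto+json", b"{}");
        assert_eq!(framed, b"DSSEv1 28 application/vnd.in-toto+json 2 {}");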
// For DSSE envelopes, verify using PAE (Pre-Authentication Encoding) - if let SignatureContent::DsseEnvelope(envelope) = &bundle.content { - let payload_bytes = envelope.decode_payload(); - + if let Some(BundleContent::DsseEnvelope(envelope)) = &bundle.content { // Compute the PAE that was signed - let pae = sigstore_types::pae(&envelope.payload_type, &payload_bytes); + let pae = envelope.pae(); // Verify at least one signature is cryptographically valid let mut any_sig_valid = false; for sig in &envelope.signatures { + let sig_bytes = sigstore_types::SignatureBytes::new(sig.sig.clone()); if sigstore_crypto::verify_signature( &cert_info.public_key, &pae, - &sig.sig, + &sig_bytes, cert_info.signing_scheme, ) .is_ok() @@ -344,12 +347,10 @@ impl Verifier { // Verify artifact hash matches (for DSSE with in-toto statements) if envelope.payload_type == "application/vnd.in-toto+json" { - let payload_bytes = envelope.payload.as_bytes(); - let artifact_hash = compute_artifact_digest(&artifact); let artifact_hash_hex = artifact_hash.to_hex(); - let payload_str = std::str::from_utf8(payload_bytes).map_err(|e| { + let payload_str = std::str::from_utf8(&envelope.payload).map_err(|e| { Error::Verification(format!("payload is not valid UTF-8: {}", e)) })?; diff --git a/crates/sigstore-verify/src/verify_impl/hashedrekord.rs b/crates/sigstore-verify/src/verify_impl/hashedrekord.rs index eab7492..df6a672 100644 --- a/crates/sigstore-verify/src/verify_impl/hashedrekord.rs +++ b/crates/sigstore-verify/src/verify_impl/hashedrekord.rs @@ -4,18 +4,27 @@ //! artifact hash verification and certificate/signature matching. use crate::error::{Error, Result}; +use base64::Engine; use sigstore_rekor::body::RekorEntryBody; -use sigstore_types::bundle::VerificationMaterialContent; -use sigstore_types::{ - Artifact, Bundle, Sha256Hash, SignatureBytes, SignatureContent, TransparencyLogEntry, -}; +use sigstore_types::bundle::{BundleContent, VerificationMaterialContent}; +use sigstore_types::{Artifact, Bundle, Sha256Hash, SignatureBytes, TransparencyLogEntry}; use x509_cert::der::Decode; use x509_cert::Certificate; /// Verify artifact hash matches what's in Rekor (for hashedrekord entries) pub fn verify_hashedrekord_entries(bundle: &Bundle, artifact: &Artifact<'_>) -> Result<()> { - for entry in &bundle.verification_material.tlog_entries { - if entry.kind_version.kind == "hashedrekord" { + let vm = match bundle.verification_material.as_ref() { + Some(vm) => vm, + None => return Ok(()), + }; + + for entry in &vm.tlog_entries { + let kind = entry + .kind_version + .as_ref() + .map(|kv| kv.kind.as_str()) + .unwrap_or(""); + if kind == "hashedrekord" { verify_hashedrekord_entry(entry, bundle, artifact)?; } } @@ -28,13 +37,14 @@ fn verify_hashedrekord_entry( bundle: &Bundle, artifact: &Artifact<'_>, ) -> Result<()> { + let kv = entry.kind_version.as_ref().ok_or_else(|| { + Error::Verification("hashedrekord entry missing kind_version".to_string()) + })?; + // Parse the Rekor entry body (convert canonicalized body to base64 string) - let body = RekorEntryBody::from_base64_json( - &entry.canonicalized_body.to_base64(), - &entry.kind_version.kind, - &entry.kind_version.version, - ) - .map_err(|e| Error::Verification(format!("failed to parse Rekor body: {}", e)))?; + let body_base64 = base64::engine::general_purpose::STANDARD.encode(&entry.canonicalized_body); + let body = RekorEntryBody::from_base64_json(&body_base64, &kv.kind, &kv.version) + .map_err(|e| Error::Verification(format!("failed to parse Rekor body: {}", e)))?; // 
Compute artifact hash from artifact (bytes or pre-computed digest) let artifact_hash = compute_artifact_digest(artifact); @@ -58,7 +68,7 @@ fn verify_hashedrekord_entry( _ => { return Err(Error::Verification(format!( "expected HashedRekord body, got different type for version {}", - entry.kind_version.version + kv.version ))); } }; @@ -135,20 +145,22 @@ fn validate_certificate_match( if let Some(rekor_cert_der) = rekor_cert_der_opt { // Get the certificate from the bundle - let bundle_cert = match &bundle.verification_material.content { - VerificationMaterialContent::X509CertificateChain { certificates } => { - certificates.first().map(|c| &c.raw_bytes) + let vm = match bundle.verification_material.as_ref() { + Some(vm) => vm, + None => return Ok(()), + }; + + let bundle_cert: Option<&Vec> = match &vm.content { + Some(VerificationMaterialContent::X509CertificateChain(chain)) => { + chain.certificates.first().map(|c| &c.raw_bytes) } - VerificationMaterialContent::Certificate(cert) => Some(&cert.raw_bytes), + Some(VerificationMaterialContent::Certificate(cert)) => Some(&cert.raw_bytes), _ => None, }; if let Some(bundle_cert) = bundle_cert { - // Bundle certificate is DerCertificate, get raw bytes - let bundle_cert_der = bundle_cert.as_bytes(); - // Compare certificates - if bundle_cert_der != rekor_cert_der { + if bundle_cert != &rekor_cert_der { return Err(Error::Verification( "certificate in bundle does not match certificate in Rekor entry".to_string(), )); @@ -180,11 +192,9 @@ fn validate_signature_match( if let Some(rekor_sig) = rekor_sig { // Get the signature from the bundle (only for MessageSignature, not DSSE) - if let SignatureContent::MessageSignature(sig) = &bundle.content { - let bundle_sig = &sig.signature; - - // Compare signatures (both are SignatureBytes) - if bundle_sig != rekor_sig { + if let Some(BundleContent::MessageSignature(sig)) = &bundle.content { + // bundle signature is Vec, rekor_sig is SignatureBytes + if sig.signature != rekor_sig.as_bytes() { return Err(Error::Verification( "signature in bundle does not match signature in Rekor entry".to_string(), )); @@ -215,7 +225,7 @@ fn verify_signature_cryptographically( artifact: &Artifact<'_>, ) -> Result<()> { // Only verify for MessageSignature (not DSSE envelopes) - if let SignatureContent::MessageSignature(_) = &bundle.content { + if let Some(BundleContent::MessageSignature(_)) = &bundle.content { // Extract the signature from Rekor let signature_bytes = match body { RekorEntryBody::HashedRekordV001(rekord) => { @@ -234,20 +244,22 @@ fn verify_signature_cryptographically( }; // Get the certificate from the bundle - let bundle_cert = match &bundle.verification_material.content { - VerificationMaterialContent::X509CertificateChain { certificates } => { - certificates.first().map(|c| &c.raw_bytes) + let vm = match bundle.verification_material.as_ref() { + Some(vm) => vm, + None => return Ok(()), + }; + + let bundle_cert: Option<&Vec> = match &vm.content { + Some(VerificationMaterialContent::X509CertificateChain(chain)) => { + chain.certificates.first().map(|c| &c.raw_bytes) } - VerificationMaterialContent::Certificate(cert) => Some(&cert.raw_bytes), + Some(VerificationMaterialContent::Certificate(cert)) => Some(&cert.raw_bytes), _ => None, }; if let Some(bundle_cert) = bundle_cert { - // Get certificate DER bytes directly - let cert_der = bundle_cert.as_bytes(); - // Parse certificate to extract public key and algorithm - let cert_info = sigstore_crypto::x509::parse_certificate_info(cert_der)?; + let cert_info = 
@@ -215,7 +225,7 @@ fn verify_signature_cryptographically(
     artifact: &Artifact<'_>,
 ) -> Result<()> {
     // Only verify for MessageSignature (not DSSE envelopes)
-    if let SignatureContent::MessageSignature(_) = &bundle.content {
+    if let Some(BundleContent::MessageSignature(_)) = &bundle.content {
         // Extract the signature from Rekor
         let signature_bytes = match body {
             RekorEntryBody::HashedRekordV001(rekord) => {
@@ -234,20 +244,22 @@
         };

         // Get the certificate from the bundle
-        let bundle_cert = match &bundle.verification_material.content {
-            VerificationMaterialContent::X509CertificateChain { certificates } => {
-                certificates.first().map(|c| &c.raw_bytes)
+        let vm = match bundle.verification_material.as_ref() {
+            Some(vm) => vm,
+            None => return Ok(()),
+        };
+
+        let bundle_cert: Option<&Vec<u8>> = match &vm.content {
+            Some(VerificationMaterialContent::X509CertificateChain(chain)) => {
+                chain.certificates.first().map(|c| &c.raw_bytes)
             }
-            VerificationMaterialContent::Certificate(cert) => Some(&cert.raw_bytes),
+            Some(VerificationMaterialContent::Certificate(cert)) => Some(&cert.raw_bytes),
             _ => None,
         };

         if let Some(bundle_cert) = bundle_cert {
-            // Get certificate DER bytes directly
-            let cert_der = bundle_cert.as_bytes();
-
             // Parse certificate to extract public key and algorithm
-            let cert_info = sigstore_crypto::x509::parse_certificate_info(cert_der)?;
+            let cert_info = sigstore_crypto::x509::parse_certificate_info(bundle_cert)?;

             match artifact {
                 Artifact::Bytes(bytes) => {
@@ -306,21 +318,29 @@
 /// Validate that integrated time is within certificate validity period
 fn validate_integrated_time(entry: &TransparencyLogEntry, bundle: &Bundle) -> Result<()> {
-    let bundle_cert = match &bundle.verification_material.content {
-        VerificationMaterialContent::X509CertificateChain { certificates } => {
-            certificates.first().map(|c| &c.raw_bytes)
+    let vm = match bundle.verification_material.as_ref() {
+        Some(vm) => vm,
+        None => return Ok(()),
+    };
+
+    let bundle_cert: Option<&Vec<u8>> = match &vm.content {
+        Some(VerificationMaterialContent::X509CertificateChain(chain)) => {
+            chain.certificates.first().map(|c| &c.raw_bytes)
         }
-        VerificationMaterialContent::Certificate(cert) => Some(&cert.raw_bytes),
+        Some(VerificationMaterialContent::Certificate(cert)) => Some(&cert.raw_bytes),
         _ => None,
     };

     if let Some(bundle_cert) = bundle_cert {
-        let bundle_cert_der = bundle_cert.as_bytes();
+        let kv = match entry.kind_version.as_ref() {
+            Some(kv) => kv,
+            None => return Ok(()),
+        };

         // Only validate integrated time for hashedrekord 0.0.1
-        // For 0.0.2 (Rekor v2), integrated_time is not present
-        if entry.kind_version.version == "0.0.1" && !entry.integrated_time.is_empty() {
-            let cert = Certificate::from_der(bundle_cert_der).map_err(|e| {
+        // For 0.0.2 (Rekor v2), integrated_time is 0
+        if kv.version == "0.0.1" && entry.integrated_time > 0 {
+            let cert = Certificate::from_der(bundle_cert).map_err(|e| {
                 Error::Verification(format!(
                     "failed to parse certificate for time validation: {}",
                     e
@@ -345,9 +365,7 @@ fn validate_integrated_time(entry: &TransparencyLogEntry, bundle: &Bundle) -> Re
                 })?
                 .as_secs() as i64;

-            let integrated_time = entry.integrated_time.parse::<i64>().map_err(|e| {
-                Error::Verification(format!("failed to parse integrated time: {}", e))
-            })?;
+            let integrated_time = entry.integrated_time;

             if integrated_time < not_before || integrated_time > not_after {
                 return Err(Error::Verification(format!(
diff --git a/crates/sigstore-verify/src/verify_impl/helpers.rs b/crates/sigstore-verify/src/verify_impl/helpers.rs
index 667f244..e79dee7 100644
--- a/crates/sigstore-verify/src/verify_impl/helpers.rs
+++ b/crates/sigstore-verify/src/verify_impl/helpers.rs
@@ -7,9 +7,9 @@
 use crate::error::{Error, Result};
 use const_oid::db::rfc5912::ID_KP_CODE_SIGNING;
 use rustls_pki_types::{CertificateDer, UnixTime};
 use sigstore_crypto::CertificateInfo;
-use sigstore_trust_root::TrustedRoot;
-use sigstore_types::bundle::VerificationMaterialContent;
-use sigstore_types::{Bundle, DerCertificate, DerPublicKey, SignatureBytes, SignatureContent};
+use sigstore_trust_root::{TrustedRoot, TrustedRootExt};
+use sigstore_types::bundle::{BundleContent, VerificationMaterialContent};
+use sigstore_types::{Bundle, DerCertificate, DerPublicKey, SignatureBytes};
 use webpki::{anchor_from_trusted_cert, EndEntityCert, KeyUsage, ALL_VERIFICATION_ALGS};

 /// Extract and decode the signing certificate from verification material
@@ -17,31 +17,36 @@ pub fn extract_certificate(
     verification_material: &VerificationMaterialContent,
 ) -> Result<DerCertificate> {
     match verification_material {
-        VerificationMaterialContent::Certificate(cert) => Ok(cert.raw_bytes.clone()),
-        VerificationMaterialContent::X509CertificateChain { certificates } => {
-            if certificates.is_empty() {
+        VerificationMaterialContent::Certificate(cert) => {
+            Ok(DerCertificate::from_bytes(&cert.raw_bytes))
+        }
+        VerificationMaterialContent::X509CertificateChain(chain) => {
+            if chain.certificates.is_empty() {
                 return Err(Error::Verification("no certificates in chain".to_string()));
             }
-            Ok(certificates[0].raw_bytes.clone())
+            Ok(DerCertificate::from_bytes(&chain.certificates[0].raw_bytes))
         }
-        VerificationMaterialContent::PublicKey { .. } => Err(Error::Verification(
+        VerificationMaterialContent::PublicKey(_) => Err(Error::Verification(
            "public key verification not yet supported".to_string(),
         )),
     }
 }

 /// Extract signature from bundle content (needed for TSA verification)
-pub fn extract_signature(content: &SignatureContent) -> Result<SignatureBytes> {
+pub fn extract_signature(content: &Option<BundleContent>) -> Result<SignatureBytes> {
     match content {
-        SignatureContent::MessageSignature(msg_sig) => Ok(msg_sig.signature.clone()),
-        SignatureContent::DsseEnvelope(envelope) => {
+        Some(BundleContent::MessageSignature(msg_sig)) => {
+            Ok(SignatureBytes::new(msg_sig.signature.clone()))
+        }
+        Some(BundleContent::DsseEnvelope(envelope)) => {
             if envelope.signatures.is_empty() {
                 return Err(Error::Verification(
                     "no signatures in DSSE envelope".to_string(),
                 ));
             }
-            Ok(envelope.signatures[0].sig.clone())
+            Ok(SignatureBytes::new(envelope.signatures[0].sig.clone()))
         }
+        None => Err(Error::Verification("bundle has no content".to_string())),
     }
 }
@@ -55,25 +60,26 @@ pub fn extract_tsa_timestamp(
     use sigstore_tsa::{verify_timestamp_response, VerifyOpts as TsaVerifyOpts};

     // Check if bundle has TSA timestamps
-    if bundle
-        .verification_material
-        .timestamp_verification_data
-        .rfc3161_timestamps
-        .is_empty()
-    {
+    let vm = match bundle.verification_material.as_ref() {
+        Some(vm) => vm,
+        None => return Ok(None),
+    };
+
+    let tvd = match vm.timestamp_verification_data.as_ref() {
+        Some(tvd) => tvd,
+        None => return Ok(None),
+    };
+
+    if tvd.rfc3161_timestamps.is_empty() {
         return Ok(None);
     }

     let mut earliest_timestamp: Option = None;
     let mut any_timestamp_verified = false;

-    for ts in &bundle
-        .verification_material
-        .timestamp_verification_data
-        .rfc3161_timestamps
-    {
-        // Get the timestamp bytes
-        let ts_bytes = ts.signed_timestamp.as_bytes();
+    for ts in &tvd.rfc3161_timestamps {
+        // Get the timestamp bytes (now Vec<u8>)
+        let ts_bytes = &ts.signed_timestamp;

         // Build verification options from trusted root
         let mut opts = TsaVerifyOpts::new();
@@ -119,13 +125,7 @@ pub fn extract_tsa_timestamp(
     }

     // If we have a trusted root and timestamps were present but none verified, that's an error
-    if !any_timestamp_verified
-        && !bundle
-            .verification_material
-            .timestamp_verification_data
-            .rfc3161_timestamps
-            .is_empty()
-    {
+    if !any_timestamp_verified && !tvd.rfc3161_timestamps.is_empty() {
         return Err(Error::Verification(
             "TSA timestamps present but none could be verified against trusted root".to_string(),
         ));
@@ -137,11 +137,18 @@ pub fn extract_tsa_timestamp(
 /// Check if bundle contains V2 tlog entries (hashedrekord/dsse v0.0.2)
 /// V2 entries have integrated_time=0 and require RFC3161 timestamps
 pub fn has_v2_tlog_entries(bundle: &Bundle) -> bool {
-    bundle
-        .verification_material
-        .tlog_entries
-        .iter()
-        .any(|entry| entry.kind_version.version == "0.0.2")
+    let vm = match bundle.verification_material.as_ref() {
+        Some(vm) => vm,
+        None => return false,
+    };
+
+    vm.tlog_entries.iter().any(|entry| {
+        entry
+            .kind_version
+            .as_ref()
+            .map(|kv| kv.version == "0.0.2")
+            .unwrap_or(false)
+    })
 }
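Note: the repeated `verification_material.as_ref()` / `kind_version.as_ref()` matching above reflects that every nested message in the prost-generated types is an `Option`. A sketch of how that unwrapping could be centralised, assuming `Bundle` and `TransparencyLogEntry` are the generated types (the helper name is invented for illustration):

    /// Hypothetical helper: iterate tlog entries of a given kind, treating a
    /// missing verification_material or kind_version as "nothing to yield".
    fn entries_of_kind<'a>(
        bundle: &'a Bundle,
        kind: &'a str,
    ) -> impl Iterator<Item = &'a TransparencyLogEntry> + 'a {
        bundle
            .verification_material
            .iter() // Option yields zero or one items
            .flat_map(|vm| vm.tlog_entries.iter())
            .filter(move |entry| {
                entry
                    .kind_version
                    .as_ref()
                    .map(|kv| kv.kind == kind)
                    .unwrap_or(false)
            })
    }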

 /// Extract integrated time from V1 tlog entries that have inclusion promises.
@@ -153,26 +160,28 @@ pub fn has_v2_tlog_entries(bundle: &Bundle) -> bool {
 ///
 /// Returns the earliest valid integrated time if any are present.
 fn extract_v1_integrated_time_with_promise(bundle: &Bundle) -> Option<i64> {
+    let vm = bundle.verification_material.as_ref()?;
     let mut earliest_time: Option<i64> = None;

-    for entry in &bundle.verification_material.tlog_entries {
+    for entry in &vm.tlog_entries {
+        let kv = entry.kind_version.as_ref()?;
+
         // Only V1 entries (0.0.1) with inclusion promises are valid timestamp sources
-        let is_v1 = entry.kind_version.version == "0.0.1"
-            && (entry.kind_version.kind == "hashedrekord" || entry.kind_version.kind == "dsse");
+        let is_v1 = kv.version == "0.0.1" && (kv.kind == "hashedrekord" || kv.kind == "dsse");

         if !is_v1 || entry.inclusion_promise.is_none() {
             continue;
         }

-        if let Ok(time) = entry.integrated_time.parse::<i64>() {
-            if time > 0 {
-                if let Some(earliest) = earliest_time {
-                    if time < earliest {
-                        earliest_time = Some(time);
-                    }
-                } else {
+        // integrated_time is now i64, not String
+        let time = entry.integrated_time;
+        if time > 0 {
+            if let Some(earliest) = earliest_time {
+                if time < earliest {
                     earliest_time = Some(time);
                 }
+            } else {
+                earliest_time = Some(time);
             }
         }
     }
@@ -257,21 +266,19 @@ pub fn verify_certificate_chain(
 ) -> Result<()> {
     // Extract the end-entity certificate and any intermediates from the bundle
     let (ee_cert_der, intermediate_ders) = match verification_material {
-        VerificationMaterialContent::Certificate(cert) => {
-            (cert.raw_bytes.as_bytes().to_vec(), Vec::new())
-        }
-        VerificationMaterialContent::X509CertificateChain { certificates } => {
-            if certificates.is_empty() {
+        VerificationMaterialContent::Certificate(cert) => (cert.raw_bytes.clone(), Vec::new()),
+        VerificationMaterialContent::X509CertificateChain(chain) => {
+            if chain.certificates.is_empty() {
                 return Err(Error::Verification("no certificates in chain".to_string()));
             }
-            let ee = certificates[0].raw_bytes.as_bytes().to_vec();
-            let intermediates: Vec<Vec<u8>> = certificates[1..]
+            let ee = chain.certificates[0].raw_bytes.clone();
+            let intermediates: Vec<Vec<u8>> = chain.certificates[1..]
                 .iter()
-                .map(|c| c.raw_bytes.as_bytes().to_vec())
+                .map(|c| c.raw_bytes.clone())
                 .collect();
             (ee, intermediates)
         }
-        VerificationMaterialContent::PublicKey { .. } => {
+        VerificationMaterialContent::PublicKey(_) => {
             return Err(Error::Verification(
                 "public key verification not yet supported".to_string(),
             ));
@@ -379,11 +386,9 @@ fn get_issuer_spki(
     use x509_cert::Certificate;

     // 1. Try to get from chain in verification material
-    if let VerificationMaterialContent::X509CertificateChain { certificates } =
-        verification_material
-    {
-        if certificates.len() > 1 {
-            let issuer_der = certificates[1].raw_bytes.as_bytes();
+    if let VerificationMaterialContent::X509CertificateChain(chain) = verification_material {
+        if chain.certificates.len() > 1 {
+            let issuer_der = &chain.certificates[1].raw_bytes;
             let issuer_cert = Certificate::from_der(issuer_der).map_err(|e| {
                 Error::Verification(format!("failed to parse issuer certificate: {}", e))
             })?;
diff --git a/crates/sigstore-verify/src/verify_impl/rekor.rs b/crates/sigstore-verify/src/verify_impl/rekor.rs
index 7fbc9e7..2356999 100644
--- a/crates/sigstore-verify/src/verify_impl/rekor.rs
+++ b/crates/sigstore-verify/src/verify_impl/rekor.rs
@@ -6,18 +6,29 @@
 use crate::error::{Error, Result};
 use base64::Engine;
 use sigstore_rekor::body::RekorEntryBody;
-use sigstore_types::{Bundle, SignatureContent, TransparencyLogEntry};
+use sigstore_types::bundle::BundleContent;
+use sigstore_types::{Bundle, DsseEnvelope, TransparencyLogEntry};

 /// Verify DSSE envelope matches Rekor entry (for DSSE bundles)
 pub fn verify_dsse_entries(bundle: &Bundle) -> Result<()> {
     let envelope = match &bundle.content {
-        SignatureContent::DsseEnvelope(env) => env,
+        Some(BundleContent::DsseEnvelope(env)) => env,
         _ => return Ok(()), // Not a DSSE bundle
     };

-    for entry in &bundle.verification_material.tlog_entries {
-        if entry.kind_version.kind == "dsse" {
-            match entry.kind_version.version.as_str() {
+    let vm = match bundle.verification_material.as_ref() {
+        Some(vm) => vm,
+        None => return Ok(()),
+    };
+
+    for entry in &vm.tlog_entries {
+        let kv = match entry.kind_version.as_ref() {
+            Some(kv) => kv,
+            None => continue,
+        };
+
+        if kv.kind == "dsse" {
+            match kv.version.as_str() {
                 "0.0.1" => verify_dsse_v001(entry, envelope, bundle)?,
                 "0.0.2" => verify_dsse_v002(entry, envelope, bundle)?,
                 _ => {} // Unknown version, skip
@@ -41,15 +52,17 @@ pub fn verify_dsse_entries(bundle: &Bundle) -> Result<()> {
 /// - Signatures list matches between entry and envelope (both signature and verifier)
 fn verify_dsse_v001(
     entry: &TransparencyLogEntry,
-    envelope: &sigstore_types::DsseEnvelope,
+    envelope: &DsseEnvelope,
     bundle: &Bundle,
 ) -> Result<()> {
-    let body = RekorEntryBody::from_base64_json(
-        &entry.canonicalized_body.to_base64(),
-        &entry.kind_version.kind,
-        &entry.kind_version.version,
-    )
-    .map_err(|e| Error::Verification(format!("failed to parse Rekor body: {}", e)))?;
+    let kv = entry
+        .kind_version
+        .as_ref()
+        .ok_or_else(|| Error::Verification("entry missing kind_version".to_string()))?;
+
+    let body_base64 = base64::engine::general_purpose::STANDARD.encode(&entry.canonicalized_body);
+    let body = RekorEntryBody::from_base64_json(&body_base64, &kv.kind, &kv.version)
+        .map_err(|e| Error::Verification(format!("failed to parse Rekor body: {}", e)))?;

     let (expected_hash, rekor_signatures) = match &body {
         RekorEntryBody::DsseV001(dsse_body) => (
@@ -64,8 +77,7 @@ fn verify_dsse_v001(
     };

     // Verify payload hash (v0.0.1 uses hex encoding)
-    let payload_bytes = envelope.payload.as_bytes();
-    let payload_hash = sigstore_crypto::sha256(payload_bytes);
+    let payload_hash = sigstore_crypto::sha256(&envelope.payload);
     let payload_hash_hex = hex::encode(payload_hash);

     if &payload_hash_hex != expected_hash {
@@ -76,7 +88,15 @@ fn verify_dsse_v001(
     }

     // Extract the signing certificate from the bundle
-    let cert = super::helpers::extract_certificate(&bundle.verification_material.content)?;
+    let vm = bundle
+        .verification_material
+        .as_ref()
+        .ok_or_else(|| Error::Verification("bundle missing verification material".to_string()))?;
+    let vm_content = vm
+        .content
+        .as_ref()
+        .ok_or_else(|| Error::Verification("bundle missing verification content".to_string()))?;
+    let cert = super::helpers::extract_certificate(vm_content)?;

     // Verify that the signatures in the bundle match what's in Rekor
     // This prevents signature substitution attacks
@@ -100,7 +120,7 @@ fn verify_dsse_v001(
             .map_err(|e| Error::Verification(format!("{}", e)))?;

         // Compare both signature bytes AND the verifier (certificate as DER)
-        if bundle_sig.sig.as_bytes() == rekor_sig.signature.as_bytes()
+        if bundle_sig.sig == rekor_sig.signature.as_bytes()
             && cert.as_bytes() == rekor_cert_der.as_bytes()
         {
             found = true;
@@ -120,15 +140,17 @@ fn verify_dsse_v001(
 /// Verify DSSE v0.0.2 entry (payload hash and signature validation)
 fn verify_dsse_v002(
     entry: &TransparencyLogEntry,
-    envelope: &sigstore_types::DsseEnvelope,
+    envelope: &DsseEnvelope,
     bundle: &Bundle,
 ) -> Result<()> {
-    let body = RekorEntryBody::from_base64_json(
-        &entry.canonicalized_body.to_base64(),
-        &entry.kind_version.kind,
-        &entry.kind_version.version,
-    )
-    .map_err(|e| Error::Verification(format!("failed to parse Rekor body: {}", e)))?;
+    let kv = entry
+        .kind_version
+        .as_ref()
+        .ok_or_else(|| Error::Verification("entry missing kind_version".to_string()))?;
+
+    let body_base64 = base64::engine::general_purpose::STANDARD.encode(&entry.canonicalized_body);
+    let body = RekorEntryBody::from_base64_json(&body_base64, &kv.kind, &kv.version)
+        .map_err(|e| Error::Verification(format!("failed to parse Rekor body: {}", e)))?;

     let (expected_hash, rekor_signatures) = match &body {
         RekorEntryBody::DsseV002(dsse_body) => (
@@ -143,8 +165,7 @@ fn verify_dsse_v002(
     };

     // Compute actual payload hash
-    let payload_bytes = envelope.payload.as_bytes();
-    let payload_hash = sigstore_crypto::sha256(payload_bytes);
+    let payload_hash = sigstore_crypto::sha256(&envelope.payload);

     // Compare hashes (expected_hash is Vec<u8>)
     if payload_hash.as_slice() != expected_hash.as_slice() {
@@ -156,7 +177,15 @@ fn verify_dsse_v002(
     }

     // Extract the signing certificate from the bundle
-    let cert = super::helpers::extract_certificate(&bundle.verification_material.content)?;
+    let vm = bundle
+        .verification_material
+        .as_ref()
+        .ok_or_else(|| Error::Verification("bundle missing verification material".to_string()))?;
+    let vm_content = vm
+        .content
+        .as_ref()
+        .ok_or_else(|| Error::Verification("bundle missing verification content".to_string()))?;
+    let cert = super::helpers::extract_certificate(vm_content)?;

     // Verify that the signatures in the bundle match what's in Rekor
     // This prevents signature substitution attacks
@@ -176,9 +205,9 @@ fn verify_dsse_v002(
     let mut found = false;
     for rekor_sig in rekor_signatures {
         // Compare both signature bytes AND the verifier (certificate)
-        // The signature field in the bundle is SignatureBytes, compare as bytes
+        // The signature field in the bundle is Vec<u8>, compare directly
         // The verifier contains the x509Certificate.rawBytes (DerCertificate)
-        if bundle_sig.sig.as_bytes() == rekor_sig.content.as_bytes()
+        if bundle_sig.sig == rekor_sig.content.as_bytes()
             && cert.as_bytes() == rekor_sig.verifier.x509_certificate.raw_bytes.as_bytes()
         {
             found = true;
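Note: the only material difference between the v0.0.1 and v0.0.2 payload-hash checks above is the encoding of the expected digest (hex string versus raw bytes). A small sketch of the two comparisons, with `sha2` and `hex` standing in for `sigstore_crypto::sha256` (helper names are illustrative):

    use sha2::{Digest, Sha256};

    /// Sketch: DSSE v0.0.1 bodies record the payload hash as a hex string.
    fn payload_hash_ok_v001(payload: &[u8], expected_hex: &str) -> bool {
        hex::encode(Sha256::digest(payload)) == expected_hex
    }

    /// Sketch: DSSE v0.0.2 bodies record the raw digest bytes instead.
    fn payload_hash_ok_v002(payload: &[u8], expected_raw: &[u8]) -> bool {
        Sha256::digest(payload).as_slice() == expected_raw
    }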
@@ -198,12 +227,22 @@ fn verify_dsse_v002(
 /// Verify DSSE payload matches what's in Rekor (for intoto entries)
 pub fn verify_intoto_entries(bundle: &Bundle) -> Result<()> {
     let envelope = match &bundle.content {
-        SignatureContent::DsseEnvelope(env) => env,
+        Some(BundleContent::DsseEnvelope(env)) => env,
         _ => return Ok(()), // Not a DSSE bundle
     };

-    for entry in &bundle.verification_material.tlog_entries {
-        if entry.kind_version.kind == "intoto" {
+    let vm = match bundle.verification_material.as_ref() {
+        Some(vm) => vm,
+        None => return Ok(()),
+    };
+
+    for entry in &vm.tlog_entries {
+        let kv = match entry.kind_version.as_ref() {
+            Some(kv) => kv,
+            None => continue,
+        };
+
+        if kv.kind == "intoto" {
             verify_intoto_v002(entry, envelope)?;
         }
     }
@@ -212,16 +251,15 @@ pub fn verify_intoto_entries(bundle: &Bundle) -> Result<()> {
 }

 /// Verify intoto v0.0.2 entry
-fn verify_intoto_v002(
-    entry: &TransparencyLogEntry,
-    envelope: &sigstore_types::DsseEnvelope,
-) -> Result<()> {
-    let body = RekorEntryBody::from_base64_json(
-        &entry.canonicalized_body.to_base64(),
-        &entry.kind_version.kind,
-        &entry.kind_version.version,
-    )
-    .map_err(|e| Error::Verification(format!("failed to parse Rekor body: {}", e)))?;
+fn verify_intoto_v002(entry: &TransparencyLogEntry, envelope: &DsseEnvelope) -> Result<()> {
+    let kv = entry
+        .kind_version
+        .as_ref()
+        .ok_or_else(|| Error::Verification("entry missing kind_version".to_string()))?;
+
+    let body_base64 = base64::engine::general_purpose::STANDARD.encode(&entry.canonicalized_body);
+    let body = RekorEntryBody::from_base64_json(&body_base64, &kv.kind, &kv.version)
+        .map_err(|e| Error::Verification(format!("failed to parse Rekor body: {}", e)))?;

     let (rekor_payload_b64, rekor_signatures) = match &body {
         RekorEntryBody::IntotoV002(intoto_body) => (
@@ -240,8 +278,8 @@ fn verify_intoto_v002(
         .decode(rekor_payload_b64.as_bytes())
         .map_err(|e| Error::Verification(format!("failed to decode Rekor payload: {}", e)))?;

-    // Compare with bundle payload bytes
-    if envelope.payload.as_bytes() != rekor_payload_bytes.as_slice() {
+    // Compare with bundle payload bytes (now Vec<u8>)
+    if envelope.payload != rekor_payload_bytes {
         return Err(Error::Verification(
             "DSSE payload in bundle does not match intoto Rekor entry".to_string(),
         ));
@@ -258,7 +296,7 @@ fn verify_intoto_v002(
             Error::Verification(format!("failed to decode Rekor signature: {}", e))
         })?;

-        if bundle_sig.sig.as_bytes() == rekor_sig_decoded.as_slice() {
+        if bundle_sig.sig == rekor_sig_decoded {
             found_match = true;
             break;
         }
diff --git a/crates/sigstore-verify/src/verify_impl/sct.rs b/crates/sigstore-verify/src/verify_impl/sct.rs
index d66fa6a..24c7a2e 100644
--- a/crates/sigstore-verify/src/verify_impl/sct.rs
+++ b/crates/sigstore-verify/src/verify_impl/sct.rs
@@ -7,7 +7,7 @@
 use crate::error::{Error, Result};
 use const_oid::db::rfc6962::CT_PRECERT_SCTS;
 use sigstore_crypto::{verify_signature, SigningScheme};
-use sigstore_trust_root::TrustedRoot;
+use sigstore_trust_root::{TrustedRoot, TrustedRootExt};
 use sigstore_types::{DerPublicKey, SignatureBytes};
 use tls_codec::{SerializeBytes, TlsByteVecU16, TlsByteVecU24, TlsSerializeBytes, TlsSize};
 use x509_cert::{
@@ -217,12 +217,13 @@ pub fn verify_sct(
     // Find the matching CT log key by log ID
     let log_id = &sct.log_id.key_id;
-    let (_, public_key) = ct_keys.iter().find(|(id, _)| id == log_id).ok_or_else(|| {
+    let (_, public_key_bytes) = ct_keys.iter().find(|(id, _)| id == log_id).ok_or_else(|| {
         Error::Verification(format!(
             "SCT log ID {:?} not found in trusted root CT logs",
             hex::encode(log_id)
         ))
     })?;
+    let public_key = DerPublicKey::from_bytes(public_key_bytes);

     // Construct the DigitallySigned structure
     let digitally_signed = DigitallySigned::from_embedded_sct(&cert, &sct, issuer_key_hash)?;
@@ -236,7 +237,7 @@ pub fn verify_sct(
     let signature = SignatureBytes::new(sct.signature.signature.clone().into_vec());

     // Verify the signature
-    digitally_signed.verify(public_key, sig_alg, &signature)?;
+    digitally_signed.verify(&public_key, sig_alg, &signature)?;

     Ok(())
 }
diff --git a/crates/sigstore-verify/src/verify_impl/tlog.rs b/crates/sigstore-verify/src/verify_impl/tlog.rs
index 337b78e..5a614c9 100644
--- a/crates/sigstore-verify/src/verify_impl/tlog.rs
+++ b/crates/sigstore-verify/src/verify_impl/tlog.rs
@@ -7,9 +7,9 @@
 use crate::error::{Error, Result};
 use base64::Engine;
 use serde::Serialize;
 use sigstore_crypto::{verify_signature, Checkpoint, SigningScheme};
-use sigstore_trust_root::TrustedRoot;
+use sigstore_trust_root::{TrustedRoot, TrustedRootExt};
 use sigstore_types::bundle::InclusionProof;
-use sigstore_types::{Bundle, SignatureBytes, TransparencyLogEntry};
+use sigstore_types::{Bundle, DerPublicKey, SignatureBytes, TransparencyLogEntry};

 /// Verify transparency log entries (checkpoints and SETs)
 ///
@@ -26,16 +26,19 @@ pub fn verify_tlog_entries(
     not_after: i64,
     clock_skew_seconds: i64,
 ) -> Result<Option<i64>> {
+    let vm = match bundle.verification_material.as_ref() {
+        Some(vm) => vm,
+        None => return Ok(None),
+    };
+
     let mut integrated_time_result: Option<i64> = None;

-    for entry in &bundle.verification_material.tlog_entries {
+    for entry in &vm.tlog_entries {
         // Verify checkpoint signature if present
         if let Some(ref inclusion_proof) = entry.inclusion_proof {
-            verify_checkpoint(
-                &inclusion_proof.checkpoint.envelope,
-                inclusion_proof,
-                trusted_root,
-            )?;
+            if let Some(ref checkpoint) = inclusion_proof.checkpoint {
+                verify_checkpoint(&checkpoint.envelope, inclusion_proof, trusted_root)?;
+            }
         }

         // Verify inclusion promise (SET) if present
@@ -43,38 +46,35 @@ pub fn verify_tlog_entries(
             verify_set(entry, trusted_root)?;
         }

-        // Validate integrated time
-        if !entry.integrated_time.is_empty() {
-            if let Ok(time) = entry.integrated_time.parse::<i64>() {
-                // Ignore 0 as it indicates invalid/missing time
-                if time > 0 {
-                    // Check that integrated time is not in the future (with clock skew tolerance)
-                    let now = chrono::Utc::now().timestamp();
-                    if time > now + clock_skew_seconds {
-                        return Err(Error::Verification(format!(
-                            "integrated time {} is in the future (current time: {}, tolerance: {}s)",
-                            time, now, clock_skew_seconds
-                        )));
-                    }
-
-                    // Check that integrated time is within certificate validity period
-                    if time < not_before {
-                        return Err(Error::Verification(format!(
-                            "integrated time {} is before certificate validity (not_before: {})",
-                            time, not_before
-                        )));
-                    }
-
-                    if time > not_after {
-                        return Err(Error::Verification(format!(
-                            "integrated time {} is after certificate validity (not_after: {})",
-                            time, not_after
-                        )));
-                    }
-
-                    integrated_time_result = Some(time);
-                }
+        // Validate integrated time (now i64, not String)
+        let time = entry.integrated_time;
+        // Ignore 0 as it indicates invalid/missing time (V2 entries have integrated_time=0)
+        if time > 0 {
+            // Check that integrated time is not in the future (with clock skew tolerance)
+            let now = chrono::Utc::now().timestamp();
+            if time > now + clock_skew_seconds {
+                return Err(Error::Verification(format!(
+                    "integrated time {} is in the future (current time: {}, tolerance: {}s)",
+                    time, now, clock_skew_seconds
+                )));
             }
+
+            // Check that integrated time is within certificate validity period
+            if time < not_before {
+                return Err(Error::Verification(format!(
+                    "integrated time {} is before certificate validity (not_before: {})",
+                    time, not_before
+                )));
+            }
+
+            if time > not_after {
+                return Err(Error::Verification(format!(
+                    "integrated time {} is after certificate validity (not_after: {})",
+                    time, not_after
+                )));
+            }
+
+            integrated_time_result = Some(time);
         }
     }
@@ -96,14 +96,14 @@
     // Verify that the checkpoint's root hash matches the inclusion proof's root hash
     let checkpoint_root_hash = &checkpoint.root_hash;

-    // The root hash in the inclusion proof is already a Sha256Hash
+    // The root hash in the inclusion proof is now Vec<u8>
     let proof_root_hash = &inclusion_proof.root_hash;

-    if checkpoint_root_hash.as_bytes() != proof_root_hash.as_bytes() {
+    if checkpoint_root_hash.as_bytes() != proof_root_hash.as_slice() {
         return Err(Error::Verification(format!(
             "Checkpoint root hash mismatch: expected {}, got {}",
             checkpoint_root_hash.to_hex(),
-            proof_root_hash.to_hex()
+            hex::encode(proof_root_hash)
         )));
     }
@@ -116,11 +116,12 @@
     for sig in &checkpoint.signatures {
         // Find the key with matching key hint
         for (key_hint, public_key) in &rekor_keys {
-            if &sig.key_id == key_hint {
+            if sig.key_id.as_ref() == key_hint {
                 // Found matching key, verify the signature using automatic key type detection
                 let message = checkpoint.signed_data();
+                let der_key = DerPublicKey::from_bytes(public_key);

-                verify_signature_auto(public_key, &sig.signature, message).map_err(|e| {
+                verify_signature_auto(&der_key, &sig.signature, message).map_err(|e| {
                     Error::Verification(format!("Checkpoint signature verification failed: {}", e))
                 })?;
@@ -152,28 +153,31 @@ pub fn verify_set(entry: &TransparencyLogEntry, trusted_root: &TrustedRoot) -> R
         .as_ref()
         .ok_or(Error::Verification("Missing inclusion promise".into()))?;

+    // Get log_id (now Option with key_id: Vec<u8>)
+    let log_id = entry
+        .log_id
+        .as_ref()
+        .ok_or_else(|| Error::Verification("Missing log ID".to_string()))?;
+
+    // Convert log key_id to base64 string for lookup
+    let log_key_id_b64 = base64::engine::general_purpose::STANDARD.encode(&log_id.key_id);
+
     // Find the key for the log ID
-    let log_key = trusted_root
-        .rekor_key_for_log(&entry.log_id.key_id)
-        .map_err(|_| Error::Verification(format!("Unknown log ID: {}", entry.log_id.key_id)))?;
+    let log_key_bytes = trusted_root
+        .rekor_key_for_log(&log_key_id_b64)
+        .map_err(|_| Error::Verification(format!("Unknown log ID: {}", log_key_id_b64)))?;
+    let log_key = DerPublicKey::from_bytes(&log_key_bytes);

     // Construct the payload (base64-encoded body)
-    let body = entry.canonicalized_body.to_base64();
+    let body = base64::engine::general_purpose::STANDARD.encode(&entry.canonicalized_body);

-    let integrated_time = entry
-        .integrated_time
-        .parse::<i64>()
-        .map_err(|_| Error::Verification("Invalid integrated time".into()))?;
-    let log_index = entry
-        .log_index
-        .as_u64()
-        .map_err(|_| Error::Verification("Invalid log index".into()))? as i64;
+    // integrated_time is now i64
+    let integrated_time = entry.integrated_time;
+    // log_index is now i64
+    let log_index = entry.log_index;

     // Log ID for payload must be hex encoded
-    let log_id_bytes = base64::engine::general_purpose::STANDARD
-        .decode(entry.log_id.key_id.as_str())
-        .map_err(|_| Error::Verification("Invalid base64 log ID".into()))?;
-    let log_id_hex = hex::encode(log_id_bytes);
+    let log_id_hex = hex::encode(&log_id.key_id);

     let payload = RekorPayload {
         body,
@@ -185,8 +189,8 @@ pub fn verify_set(entry: &TransparencyLogEntry, trusted_root: &TrustedRoot) -> R
     let canonical_json = serde_json_canonicalizer::to_vec(&payload)
         .map_err(|e| Error::Verification(format!("Canonicalization failed: {}", e)))?;

-    // Get signature bytes from signed timestamp
-    let signature = SignatureBytes::new(promise.signed_entry_timestamp.as_bytes().to_vec());
+    // Get signature bytes from signed timestamp (now Vec<u8>)
+    let signature = SignatureBytes::new(promise.signed_entry_timestamp.clone());

     verify_signature(
         &log_key,
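Note: verify_set above reconstructs the message that the Rekor log key signed for the signed entry timestamp (SET). Roughly, that message is the canonical JSON of four fields; a sketch under the assumption of the conventional Rekor field names (the real struct in this file is RekorPayload, and the real code canonicalises with serde_json_canonicalizer before verifying):

    use serde::Serialize;

    /// Sketch of the payload covered by a Rekor signed entry timestamp.
    #[derive(Serialize)]
    struct SetPayload {
        body: String, // base64-encoded canonicalized body
        #[serde(rename = "integratedTime")]
        integrated_time: i64, // unix seconds
        #[serde(rename = "logID")]
        log_id: String, // hex-encoded log key id
        #[serde(rename = "logIndex")]
        log_index: i64,
    }

    fn set_message(payload: &SetPayload) -> Result<Vec<u8>, serde_json::Error> {
        // Plain serde_json keeps this sketch self-contained; the crate itself
        // produces RFC 8785 canonical JSON before checking the signature.
        serde_json::to_vec(payload)
    }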
diff --git a/crates/sigstore-verify/tests/verification_tests.rs b/crates/sigstore-verify/tests/verification_tests.rs
index 6b1e64e..ca835a8 100644
--- a/crates/sigstore-verify/tests/verification_tests.rs
+++ b/crates/sigstore-verify/tests/verification_tests.rs
@@ -2,10 +2,9 @@
 //!
 //! These tests validate the complete verification flow using real bundles.

-use sigstore_trust_root::TrustedRoot;
-use sigstore_types::{LogIndex, Sha256Hash};
+use sigstore_trust_root::{TrustedRoot, TrustedRootExt};
+use sigstore_types::{Bundle, BundleContent, BundleExt, Sha256Hash};
 use sigstore_verify::bundle::{validate_bundle, validate_bundle_with_options, ValidationOptions};
-use sigstore_verify::types::Bundle;
 use sigstore_verify::{verify, VerificationPolicy, Verifier};
 use x509_cert::der::Decode;
@@ -15,10 +14,10 @@ use x509_cert::der::Decode;
 /// For hashedrekord bundles, extracts from the message digest field.
 fn extract_artifact_digest(bundle: &Bundle) -> Option<Sha256Hash> {
     match &bundle.content {
-        sigstore_verify::types::SignatureContent::DsseEnvelope(env) => {
+        Some(BundleContent::DsseEnvelope(env)) => {
             if env.payload_type == "application/vnd.in-toto+json" {
-                let payload_bytes = env.decode_payload();
-                let payload_str = String::from_utf8(payload_bytes).ok()?;
+                // payload is already Vec<u8> in protobuf types
+                let payload_str = String::from_utf8(env.payload.clone()).ok()?;
                 let statement: serde_json::Value = serde_json::from_str(&payload_str).ok()?;
                 let subject = statement["subject"].as_array()?.first()?;
                 let sha256 = subject["digest"]["sha256"].as_str()?;
@@ -27,10 +26,11 @@ fn extract_artifact_digest(bundle: &Bundle) -> Option<Sha256Hash> {
                 None
             }
         }
-        sigstore_verify::types::SignatureContent::MessageSignature(msg_sig) => msg_sig
+        Some(BundleContent::MessageSignature(msg_sig)) => msg_sig
             .message_digest
             .as_ref()
-            .and_then(|d| Sha256Hash::try_from_slice(d.digest.as_bytes()).ok()),
+            .and_then(|d| Sha256Hash::try_from_slice(&d.digest).ok()),
+        None => None,
     }
 }
@@ -86,7 +86,8 @@ fn test_parse_v03_bundle() {
     assert!(bundle.media_type.contains("v0.3"));
     assert!(bundle.has_inclusion_proof());
-    assert!(!bundle.verification_material.tlog_entries.is_empty());
+    let vm = bundle.verification_material.as_ref().unwrap();
+    assert!(!vm.tlog_entries.is_empty());
 }

 #[test]
@@ -99,7 +100,7 @@ fn test_parse_v03_dsse_bundle() {
     // Check DSSE envelope
     match &bundle.content {
-        sigstore_verify::types::SignatureContent::DsseEnvelope(env) => {
+        Some(BundleContent::DsseEnvelope(env)) => {
             assert_eq!(env.payload_type, "application/vnd.in-toto+json");
             assert!(!env.signatures.is_empty());
         }
@@ -289,13 +290,14 @@ fn test_full_verification_flow() {
     );

     // Extract tlog entry info
-    let entry = &bundle.verification_material.tlog_entries[0];
-    assert_eq!(entry.kind_version.kind, "dsse");
-    assert_eq!(entry.log_index, LogIndex::new("166143216".to_string()));
+    let vm = bundle.verification_material.as_ref().unwrap();
+    let entry = &vm.tlog_entries[0];
+    assert_eq!(entry.kind_version.as_ref().unwrap().kind, "dsse");
+    assert_eq!(entry.log_index, 166143216_i64);

     // Verify inclusion proof
     let proof = entry.inclusion_proof.as_ref().expect("Should have proof");
-    assert_eq!(proof.tree_size, "44238955");
+    assert_eq!(proof.tree_size, 44238955_i64);
     assert_eq!(proof.hashes.len(), 10);

     // Run full verification - extract digest from bundle
@@ -330,13 +332,14 @@ fn test_full_verification_flow_happy_path() {
     );

     // Extract tlog entry info
-    let entry = &bundle.verification_material.tlog_entries[0];
-    assert_eq!(entry.kind_version.kind, "dsse");
-    assert_eq!(entry.log_index, LogIndex::new("155690850".to_string()));
+    let vm = bundle.verification_material.as_ref().unwrap();
+    let entry = &vm.tlog_entries[0];
+    assert_eq!(entry.kind_version.as_ref().unwrap().kind, "dsse");
+    assert_eq!(entry.log_index, 155690850_i64);

     // Verify inclusion proof
     let proof = entry.inclusion_proof.as_ref().expect("Should have proof");
-    assert_eq!(proof.tree_size, "33786589");
+    assert_eq!(proof.tree_size, 33786589_i64);
     assert_eq!(proof.hashes.len(), 11);

     // Run full verification - extract digest from bundle
@@ -380,14 +383,15 @@ fn test_verification_with_different_bundle_versions() {

 #[test]
 fn test_checkpoint_parsing() {
+    use sigstore_types::InclusionProofExt;
     let bundle = Bundle::from_json(V03_BUNDLE_DSSE).unwrap();
-    let entry = &bundle.verification_material.tlog_entries[0];
+    let vm = bundle.verification_material.as_ref().unwrap();
+    let entry = &vm.tlog_entries[0];
     let proof = entry.inclusion_proof.as_ref().unwrap();

     // Parse checkpoint
     let checkpoint = proof
-        .checkpoint
-        .parse()
+        .parse_checkpoint()
         .expect("Failed to parse checkpoint");

     assert_eq!(
@@ -410,10 +414,9 @@ fn test_serialization_roundtrip() {
     // Verify key properties match
     assert_eq!(bundle.media_type, bundle2.media_type);
-    assert_eq!(
-        bundle.verification_material.tlog_entries.len(),
-        bundle2.verification_material.tlog_entries.len()
-    );
+    let vm1 = bundle.verification_material.as_ref().unwrap();
+    let vm2 = bundle2.verification_material.as_ref().unwrap();
+    assert_eq!(vm1.tlog_entries.len(), vm2.tlog_entries.len());
 }

 // ==== Reference Implementation Bundle Tests ====
@@ -427,7 +430,7 @@ fn test_parse_dsse_bundle_from_python() {
     // Check DSSE envelope structure
     match &bundle.content {
-        sigstore_verify::types::SignatureContent::DsseEnvelope(env) => {
+        Some(BundleContent::DsseEnvelope(env)) => {
             assert_eq!(env.payload_type, "application/vnd.in-toto+json");
             assert_eq!(env.signatures.len(), 1, "Should have exactly 1 signature");
         }
@@ -435,9 +438,10 @@
     }

     // Verify tlog entry exists
-    assert_eq!(bundle.verification_material.tlog_entries.len(), 1);
-    let entry = &bundle.verification_material.tlog_entries[0];
-    assert_eq!(entry.kind_version.kind, "intoto");
+    let vm = bundle.verification_material.as_ref().unwrap();
+    assert_eq!(vm.tlog_entries.len(), 1);
+    let entry = &vm.tlog_entries[0];
+    assert_eq!(entry.kind_version.as_ref().unwrap().kind, "intoto");
 }

 #[test]
@@ -447,7 +451,7 @@ fn test_parse_dsse_bundle_with_multiple_signatures() {
     // Check DSSE envelope has multiple signatures
     match &bundle.content {
-        sigstore_verify::types::SignatureContent::DsseEnvelope(env) => {
+        Some(BundleContent::DsseEnvelope(env)) => {
             assert_eq!(env.payload_type, "application/vnd.in-toto+json");
             assert_eq!(env.signatures.len(), 2, "Should have exactly 2 signatures");
         }
@@ -479,8 +483,9 @@ fn test_parse_cve_2022_36056_bundle() {
     assert!(bundle.media_type.contains("v0.3"));

     // Check it's a hashedrekord type
-    let entry = &bundle.verification_material.tlog_entries[0];
-    assert_eq!(entry.kind_version.kind, "hashedrekord");
+    let vm = bundle.verification_material.as_ref().unwrap();
+    let entry = &vm.tlog_entries[0];
+    assert_eq!(entry.kind_version.as_ref().unwrap().kind, "hashedrekord");

     // Bundle structure should be valid
     let result = validate_bundle(&bundle);
@@ -521,7 +526,7 @@ fn test_dsse_bundle_with_2_signatures_should_fail() {
     // Verify the bundle has 2 signatures
     match &bundle.content {
-        sigstore_verify::types::SignatureContent::DsseEnvelope(env) => {
+        Some(BundleContent::DsseEnvelope(env)) => {
             assert_eq!(env.signatures.len(), 2, "Bundle should have 2 signatures");
         }
         _ => panic!("Expected DSSE envelope"),
@@ -651,6 +656,7 @@ fn test_bundle_no_cert_v1() {
 /// Test bundle without checkpoint in inclusion proof
 #[test]
 fn test_bundle_no_checkpoint() {
+    use sigstore_types::InclusionProofExt;
     let bundle =
         Bundle::from_json(BUNDLE_NO_CHECKPOINT).expect("Failed to parse bundle_no_checkpoint");

     assert!(bundle.media_type.contains("0.2"));

     // Should have a tlog entry
-    assert!(!bundle.verification_material.tlog_entries.is_empty());
+    let vm = bundle.verification_material.as_ref().unwrap();
+    assert!(!vm.tlog_entries.is_empty());

-    let entry = &bundle.verification_material.tlog_entries[0];
+    let entry = &vm.tlog_entries[0];
     let proof = entry.inclusion_proof.as_ref();
     assert!(proof.is_some(), "Should have inclusion proof");
@@ -668,13 +675,14 @@
     let proof = proof.unwrap();

     // Checkpoint should be empty (default value)
+    let checkpoint = proof.checkpoint.as_ref();
     assert!(
-        proof.checkpoint.envelope.is_empty(),
+        checkpoint.is_none() || checkpoint.unwrap().envelope.is_empty(),
         "Checkpoint should be empty when missing from bundle"
     );

     // Parsing empty checkpoint should fail
-    let checkpoint_result = proof.checkpoint.parse();
+    let checkpoint_result = proof.parse_checkpoint();
     assert!(
         checkpoint_result.is_err(),
         "Checkpoint parsing should fail when checkpoint is missing"
@@ -691,8 +699,9 @@ fn test_bundle_no_log_entry() {
     assert!(bundle.media_type.contains("0.1"));

     // But should have no tlog entries
+    let vm = bundle.verification_material.as_ref().unwrap();
     assert!(
-        bundle.verification_material.tlog_entries.is_empty(),
+        vm.tlog_entries.is_empty(),
         "Bundle should have no tlog entries"
     );
@@ -725,9 +734,10 @@ fn test_bundle_v3_no_signed_time() {
     assert!(bundle.media_type.contains("0.3"));

     // Should have a tlog entry
-    assert!(!bundle.verification_material.tlog_entries.is_empty());
+    let vm = bundle.verification_material.as_ref().unwrap();
+    assert!(!vm.tlog_entries.is_empty());

-    let entry = &bundle.verification_material.tlog_entries[0];
+    let entry = &vm.tlog_entries[0];

     // Check that inclusion promise is missing
     assert!(
@@ -786,13 +796,14 @@ fn test_bundle_v3_github_whl() {
     );

     // Should have tlog entry
+    let vm = bundle.verification_material.as_ref().unwrap();
     assert!(
-        !bundle.verification_material.tlog_entries.is_empty(),
+        !vm.tlog_entries.is_empty(),
         "Should have transparency log entry"
     );

     // Should have inclusion proof
-    let entry = &bundle.verification_material.tlog_entries[0];
+    let entry = &vm.tlog_entries[0];
     assert!(
         entry.inclusion_proof.is_some(),
         "Should have inclusion proof"
@@ -812,17 +823,16 @@ fn test_parse_conda_attestation_bundle() {
     // Should be DSSE envelope with in-toto attestation
     match &bundle.content {
-        sigstore_verify::types::SignatureContent::DsseEnvelope(env) => {
+        Some(BundleContent::DsseEnvelope(env)) => {
             assert_eq!(
                 env.payload_type, "application/vnd.in-toto+json",
                 "Should have in-toto payload type"
             );
             assert_eq!(env.signatures.len(), 1, "Should have exactly 1 signature");

-            // Decode payload and verify it's a conda attestation
-            let payload_bytes = env.decode_payload();
+            // payload is already Vec<u8> in protobuf types
             let payload_str =
-                String::from_utf8(payload_bytes).expect("Payload should be valid UTF-8");
+                String::from_utf8(env.payload.clone()).expect("Payload should be valid UTF-8");

             let statement: serde_json::Value =
                 serde_json::from_str(&payload_str).expect("Payload should be valid JSON");
@@ -856,9 +866,10 @@ fn test_parse_conda_attestation_bundle() {
     assert!(cert.is_some(), "Should have a signing certificate");

     // Should have tlog entry
-    assert!(!bundle.verification_material.tlog_entries.is_empty());
-    let entry = &bundle.verification_material.tlog_entries[0];
-    assert_eq!(entry.kind_version.kind, "dsse");
+    let vm = bundle.verification_material.as_ref().unwrap();
+    assert!(!vm.tlog_entries.is_empty());
+    let entry = &vm.tlog_entries[0];
+    assert_eq!(entry.kind_version.as_ref().unwrap().kind, "dsse");

     // Should have inclusion proof
     assert!(