diff --git a/Cargo.toml b/Cargo.toml index 0284457ba4..3e771523b2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -69,6 +69,7 @@ cached-client = ["cached"] [dependencies] async-trait = "0.1.52" base64 = "0.21.0" +hex = "0.4.3" cached = { version = "0.44.0", optional = true } cfg-if = "1.0.0" chrono = { version = "0.4.23" } @@ -133,6 +134,7 @@ tempfile = "3.3.0" testcontainers = "0.14" tracing-subscriber = { version = "0.3.9", features = ["env-filter"] } hex = "0.4.3" +hex-literal = "0.4" # cosign example mappings diff --git a/src/crypto/merkle/mod.rs b/src/crypto/merkle/mod.rs new file mode 100644 index 0000000000..c53cf0548d --- /dev/null +++ b/src/crypto/merkle/mod.rs @@ -0,0 +1,6 @@ +pub mod proof_verification; +pub mod rfc6962; + +pub use proof_verification::MerkleProofError; +pub(crate) use proof_verification::MerkleProofVerifier; +pub(crate) use rfc6962::{Rfc6269Default, Rfc6269HasherTrait}; diff --git a/src/crypto/merkle/proof_verification.rs b/src/crypto/merkle/proof_verification.rs new file mode 100644 index 0000000000..21a076c896 --- /dev/null +++ b/src/crypto/merkle/proof_verification.rs @@ -0,0 +1,857 @@ +use super::rfc6962::Rfc6269HasherTrait; +use digest::{Digest, Output}; +use hex::ToHex; +use std::cmp::Ordering; +use std::fmt::Debug; +use MerkleProofError::*; + +#[derive(Debug)] +pub enum MerkleProofError { + MismatchedRoot { expected: String, got: String }, + IndexGtTreeSize, + UnexpectedNonEmptyProof, + UnexpectedEmptyProof, + NewTreeSmaller { new: usize, old: usize }, + WrongProofSize { got: usize, want: usize }, + WrongEmptyTreeHash, +} + +pub(crate) trait MerkleProofVerifier: Rfc6269HasherTrait +where + O: Eq + AsRef<[u8]> + Clone + Debug, +{ + /// Used to verify hashes. 
+ fn verify_match(a: &O, b: &O) -> Result<(), ()> { + (a == b).then_some(()).ok_or(()) + } + + /// `verify_inclusion` verifies the correctness of the inclusion proof for the leaf + /// with the specified `leaf_hash` and `index`, relatively to the tree of the given `tree_size` + /// and `root_hash`. Requires `0 <= index < tree_size`. + fn verify_inclusion( + index: usize, + leaf_hash: &O, + tree_size: usize, + proof_hashes: &[O], + root_hash: &O, + ) -> Result<(), MerkleProofError> { + if index >= tree_size { + return Err(IndexGtTreeSize); + } + Self::root_from_inclusion_proof(index, leaf_hash, tree_size, proof_hashes).and_then( + |calc_root| { + Self::verify_match(calc_root.as_ref(), root_hash).map_err(|_| MismatchedRoot { + got: root_hash.encode_hex(), + expected: calc_root.encode_hex(), + }) + }, + ) + } + + /// `root_from_inclusion_proof` calculates the expected root hash for a tree of the + /// given size, provided a leaf index and hash with the corresponding inclusion + /// proof. Requires `0 <= index < tree_size`. + fn root_from_inclusion_proof( + index: usize, + leaf_hash: &O, + tree_size: usize, + proof_hashes: &[O], + ) -> Result, MerkleProofError> { + if index >= tree_size { + return Err(IndexGtTreeSize); + } + let (inner, border) = Self::decomp_inclusion_proof(index, tree_size); + match (proof_hashes.len(), inner + border) { + (got, want) if got != want => { + return Err(WrongProofSize { + got: proof_hashes.len(), + want: inner + border, + }); + } + _ => {} + } + let res_left = Self::chain_inner(leaf_hash, &proof_hashes[..inner], index); + let res = Self::chain_border_right(&res_left, &proof_hashes[inner..]); + Ok(Box::new(res)) + } + + // `verify_consistency` checks that the passed-in consistency proof is valid + // between the passed in tree sizes, with respect to the corresponding root + // hashes. Requires `0 <= old_size <= new_size`.. 
+ fn verify_consistency( + old_size: usize, + new_size: usize, + proof_hashes: &[O], + old_root: &O, + new_root: &O, + ) -> Result<(), MerkleProofError> { + match ( + Ord::cmp(&old_size, &new_size), + old_size == 0, + proof_hashes.is_empty(), + ) { + (Ordering::Greater, _, _) => { + return Err(NewTreeSmaller { + new: new_size, + old: old_size, + }); + } + // when sizes are equal and the proof is empty we can just verify the roots + (Ordering::Equal, _, true) => { + return Self::verify_match(old_root, new_root).map_err(|_| MismatchedRoot { + got: new_root.encode_hex(), + expected: old_root.encode_hex(), + }) + } + + // the proof cannot be empty if the sizes are equal or the previous size was zero + (Ordering::Equal, _, false) | (Ordering::Less, true, false) => { + return Err(UnexpectedNonEmptyProof) + } + // any proof is accepted if old_size == 0 and the hash is the expected empty hash + (Ordering::Less, true, true) => { + return Self::verify_match(old_root, &Self::empty_root()) + .map_err(|_| WrongEmptyTreeHash) + } + (Ordering::Less, false, true) => return Err(UnexpectedEmptyProof), + (Ordering::Less, false, false) => {} + } + + let shift = old_size.trailing_zeros() as usize; + let (inner, border) = Self::decomp_inclusion_proof(old_size - 1, new_size); + let inner = inner - shift; + + // The proof includes the root hash for the sub-tree of size 2^shift. + // Unless size1 is that very 2^shift. 
+ let (seed, start) = if old_size == 1 << shift { + (old_root, 0) + } else { + (&proof_hashes[0], 1) + }; + + match (proof_hashes.len(), start + inner + border) { + (got, want) if got != want => return Err(WrongProofSize { got, want }), + _ => {} + } + + let proof = &proof_hashes[start..]; + let mask = (old_size - 1) >> shift; + + // verify the old hash is correct + let hash1 = Self::chain_inner_right(seed, &proof[..inner], mask); + let hash1 = Self::chain_border_right(&hash1, &proof[inner..]); + Self::verify_match(&hash1, old_root).map_err(|_| MismatchedRoot { + got: old_root.encode_hex(), + expected: hash1.encode_hex(), + })?; + // verify the new hash is correct + let hash2 = Self::chain_inner(seed, &proof[..inner], mask); + let hash2 = Self::chain_border_right(&hash2, &proof[inner..]); + Self::verify_match(&hash2, new_root).map_err(|_| MismatchedRoot { + got: new_root.encode_hex(), + expected: hash2.encode_hex(), + })?; + Ok(()) + } + + /// `chain_inner` computes a subtree hash for a node on or below the tree's right + /// border. Assumes `proof_hashes` are ordered from lower levels to upper, and + /// `seed` is the initial subtree/leaf hash on the path located at the specified + /// `index` on its level. + fn chain_inner(seed: &O, proof_hashes: &[O], index: usize) -> O { + proof_hashes + .iter() + .enumerate() + .fold(seed.clone(), |seed, (i, h)| { + let (left, right) = if ((index >> i) & 1) == 0 { + (&seed, h) + } else { + (h, &seed) + }; + Self::hash_children(left, right) + }) + } + + /// `chain_inner_right` computes a subtree hash like `chain_inner`, but only takes + /// hashes to the left from the path into consideration, which effectively means + /// the result is a hash of the corresponding earlier version of this subtree. 
+ fn chain_inner_right(seed: &O, proof_hashes: &[O], index: usize) -> O { + proof_hashes + .iter() + .enumerate() + .fold(seed.clone(), |seed, (i, h)| { + if ((index >> i) & 1) == 1 { + Self::hash_children(h, seed) + } else { + seed + } + }) + } + + /// `chain_border_right` chains proof hashes along tree borders. This differs from + /// inner chaining because `proof` contains only left-side subtree hashes. + fn chain_border_right(seed: &O, proof_hashes: &[O]) -> O { + proof_hashes + .iter() + .fold(seed.clone(), |seed, h| Self::hash_children(h, seed)) + } + + /// `decomp_inclusion_proof` breaks down inclusion proof for a leaf at the specified + /// `index` in a tree of the specified `size` into 2 components. The splitting + /// point between them is where paths to leaves `index` and `tree_size-1` diverge. + /// Returns lengths of the bottom and upper proof parts correspondingly. The sum + /// of the two determines the correct length of the inclusion proof. + fn decomp_inclusion_proof(index: usize, tree_size: usize) -> (usize, usize) { + let inner: usize = Self::inner_proof_size(index, tree_size); + let border = (index >> inner).count_ones() as usize; + (inner, border) + } + + fn inner_proof_size(index: usize, tree_size: usize) -> usize { + u64::BITS as usize - ((index ^ (tree_size - 1)).leading_zeros() as usize) + } +} + +impl MerkleProofVerifier> for T where T: Digest {} + +#[cfg(test)] +mod test_verify { + use crate::crypto::merkle::rfc6962::Rfc6269HasherTrait; + use crate::crypto::merkle::{MerkleProofVerifier, Rfc6269Default}; + use hex_literal::hex; + + #[derive(Debug)] + struct InclusionProofTestVector<'a> { + leaf: usize, + size: usize, + proof: &'a [[u8; 32]], + } + + #[derive(Debug)] + struct ConsistencyTestVector<'a> { + size1: usize, + size2: usize, + proof: &'a [[u8; 32]], + } + + // InclusionProbe is a parameter set for inclusion proof verification. 
+ #[derive(Debug)] + struct InclusionProbe { + leaf_index: usize, + tree_size: usize, + root: [u8; 32], + leaf_hash: [u8; 32], + proof: Vec<[u8; 32]>, + desc: &'static str, + } + + // ConsistencyProbe is a parameter set for consistency proof verification. + #[derive(Debug)] + struct ConsistencyProbe<'a> { + size1: usize, + size2: usize, + root1: &'a [u8; 32], + root2: &'a [u8; 32], + proof: Vec<[u8; 32]>, + desc: &'static str, + } + + const SHA256_SOME_HASH: [u8; 32] = + hex!("abacaba000000000000000000000000000000000000000000060061e00123456"); + + const SHA256_EMPTY_TREE_HASH: [u8; 32] = + hex!("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"); + + const ZERO_HASH: [u8; 32] = [0; 32]; + + const INCLUSION_PROOFS: [InclusionProofTestVector; 6] = [ + InclusionProofTestVector { + leaf: 0, + size: 0, + proof: &[], + }, + InclusionProofTestVector { + leaf: 1, + size: 1, + proof: &[], + }, + InclusionProofTestVector { + leaf: 1, + size: 8, + proof: &[ + hex!("96a296d224f285c67bee93c30f8a309157f0daa35dc5b87e410b78630a09cfc7"), + hex!("5f083f0a1a33ca076a95279832580db3e0ef4584bdff1f54c8a360f50de3031e"), + hex!("6b47aaf29ee3c2af9af889bc1fb9254dabd31177f16232dd6aab035ca39bf6e4"), + ], + }, + InclusionProofTestVector { + leaf: 6, + size: 8, + proof: &[ + hex!("bc1a0643b12e4d2d7c77918f44e0f4f79a838b6cf9ec5b5c283e1f4d88599e6b"), + hex!("ca854ea128ed050b41b35ffc1b87b8eb2bde461e9e3b5596ece6b9d5975a0ae0"), + hex!("d37ee418976dd95753c1c73862b9398fa2a2cf9b4ff0fdfe8b30cd95209614b7"), + ], + }, + InclusionProofTestVector { + leaf: 3, + size: 3, + proof: &[hex!( + "fac54203e7cc696cf0dfcb42c92a1d9dbaf70ad9e621f4bd8d98662f00e3c125" + )], + }, + InclusionProofTestVector { + leaf: 2, + size: 5, + proof: &[ + hex!("6e340b9cffb37a989ca544e6bb780a2c78901d3fb33738768511a30617afa01d"), + hex!("5f083f0a1a33ca076a95279832580db3e0ef4584bdff1f54c8a360f50de3031e"), + hex!("bc1a0643b12e4d2d7c77918f44e0f4f79a838b6cf9ec5b5c283e1f4d88599e6b"), + ], + }, + ]; + + const CONSISTENCY_PROOFS: 
[ConsistencyTestVector; 5] = [ + ConsistencyTestVector { + size1: 1, + size2: 1, + proof: &[], + }, + ConsistencyTestVector { + size1: 1, + size2: 8, + proof: &[ + hex!("96a296d224f285c67bee93c30f8a309157f0daa35dc5b87e410b78630a09cfc7"), + hex!("5f083f0a1a33ca076a95279832580db3e0ef4584bdff1f54c8a360f50de3031e"), + hex!("6b47aaf29ee3c2af9af889bc1fb9254dabd31177f16232dd6aab035ca39bf6e4"), + ], + }, + ConsistencyTestVector { + size1: 6, + size2: 8, + proof: &[ + hex!("0ebc5d3437fbe2db158b9f126a1d118e308181031d0a949f8dededebc558ef6a"), + hex!("ca854ea128ed050b41b35ffc1b87b8eb2bde461e9e3b5596ece6b9d5975a0ae0"), + hex!("d37ee418976dd95753c1c73862b9398fa2a2cf9b4ff0fdfe8b30cd95209614b7"), + ], + }, + ConsistencyTestVector { + size1: 2, + size2: 5, + proof: &[ + hex!("5f083f0a1a33ca076a95279832580db3e0ef4584bdff1f54c8a360f50de3031e"), + hex!("bc1a0643b12e4d2d7c77918f44e0f4f79a838b6cf9ec5b5c283e1f4d88599e6b"), + ], + }, + ConsistencyTestVector { + size1: 6, + size2: 7, + proof: &[ + hex!("0ebc5d3437fbe2db158b9f126a1d118e308181031d0a949f8dededebc558ef6a"), + hex!("b08693ec2e721597130641e8211e7eedccb4c26413963eee6c1e2ed16ffb1a5f"), + hex!("d37ee418976dd95753c1c73862b9398fa2a2cf9b4ff0fdfe8b30cd95209614b7"), + ], + }, + ]; + + const ROOTS: [[u8; 32]; 8] = [ + hex!("6e340b9cffb37a989ca544e6bb780a2c78901d3fb33738768511a30617afa01d"), + hex!("fac54203e7cc696cf0dfcb42c92a1d9dbaf70ad9e621f4bd8d98662f00e3c125"), + hex!("aeb6bcfe274b70a14fb067a5e5578264db0fa9b51af5e0ba159158f329e06e77"), + hex!("d37ee418976dd95753c1c73862b9398fa2a2cf9b4ff0fdfe8b30cd95209614b7"), + hex!("4e3bbb1f7b478dcfe71fb631631519a3bca12c9aefca1612bfce4c13a86264d4"), + hex!("76e67dadbcdf1e10e1b74ddc608abd2f98dfb16fbce75277b5232a127f2087ef"), + hex!("ddb89be403809e325750d3d263cd78929c2942b7942a34b77e122c9594a74c8c"), + hex!("5dc9da79a70659a9ad559cb701ded9a2ab9d823aad2f4960cfe370eff4604328"), + ]; + + const LEAVES: &[&[u8]] = &[ + &hex!(""), + &hex!("00"), + &hex!("10"), + &hex!("2021"), + &hex!("3031"), + 
&hex!("40414243"), + &hex!("5051525354555657"), + &hex!("606162636465666768696a6b6c6d6e6f"), + ]; + + fn corrupt_inclusion_proof( + leaf_index: usize, + tree_size: usize, + proof: &[[u8; 32]], + root: &[u8; 32], + leaf_hash: &[u8; 32], + ) -> Vec { + let ret = vec![ + // Wrong leaf index. + InclusionProbe { + leaf_index: leaf_index.wrapping_sub(1), // avoid panic due to underflow + tree_size, + root: *root, + leaf_hash: *leaf_hash, + proof: proof.to_vec(), + desc: "leaf_index - 1", + }, + InclusionProbe { + leaf_index: leaf_index + 1, + tree_size, + root: *root, + leaf_hash: *leaf_hash, + proof: proof.to_vec(), + desc: "leaf_index + 1", + }, + InclusionProbe { + leaf_index: leaf_index ^ 2, + tree_size, + root: *root, + leaf_hash: *leaf_hash, + proof: proof.to_vec(), + desc: "leaf_index ^ 2", + }, // Wrong tree height. + InclusionProbe { + leaf_index, + tree_size: tree_size / 2, + root: *root, + leaf_hash: *leaf_hash, + proof: proof.to_vec(), + desc: "tree_size / 2", + }, // Wrong leaf or root. + InclusionProbe { + leaf_index, + tree_size: tree_size * 2, + root: *root, + leaf_hash: *leaf_hash, + proof: proof.to_vec(), + desc: "tree_size * 2", + }, + InclusionProbe { + leaf_index, + tree_size, + root: *root, + leaf_hash: *b"WrongLeafWrongLeafWrongLeafWrong", + proof: proof.to_vec(), + desc: "wrong leaf", + }, + InclusionProbe { + leaf_index, + tree_size, + root: SHA256_EMPTY_TREE_HASH, + leaf_hash: *leaf_hash, + proof: proof.to_vec(), + desc: "empty root", + }, + InclusionProbe { + leaf_index, + tree_size, + root: SHA256_SOME_HASH, + leaf_hash: *leaf_hash, + proof: proof.to_vec(), + desc: "random root", + }, // Add garbage at the end. 
+ InclusionProbe { + leaf_index, + tree_size, + root: *root, + leaf_hash: *leaf_hash, + proof: [proof.to_vec(), [[0 as u8; 32]].to_vec()].concat(), + desc: "trailing garbage", + }, + InclusionProbe { + leaf_index, + tree_size, + root: *root, + leaf_hash: *leaf_hash, + proof: [proof.to_vec(), [root.clone()].to_vec()].concat(), + desc: "trailing root", + }, // Add garbage at the front. + InclusionProbe { + leaf_index, + tree_size, + root: *root, + leaf_hash: *leaf_hash, + proof: [[[0 as u8; 32]].to_vec(), proof.to_vec()].concat(), + desc: "preceding garbage", + }, + InclusionProbe { + leaf_index, + tree_size, + root: *root, + leaf_hash: *leaf_hash, + proof: [[root.clone()].to_vec(), proof.to_vec()].concat(), + desc: "preceding root", + }, + ]; + + return ret; + } + + fn verifier_check( + leaf_index: usize, + tree_size: usize, + proof_hashes: &[[u8; 32]], + root: &[u8; 32], + leaf_hash: &[u8; 32], + ) -> Result<(), String> { + let probes = + corrupt_inclusion_proof(leaf_index, tree_size, &proof_hashes, &root, &leaf_hash); + let leaf_hash = leaf_hash.into(); + let root_hash = root.into(); + let proof_hashes = proof_hashes.iter().map(|&h| h.into()).collect::>(); + let got = Rfc6269Default::root_from_inclusion_proof( + leaf_index, + leaf_hash, + tree_size, + &proof_hashes, + ) + .map_err(|err| format!("{err:?}"))?; + Rfc6269Default::verify_match(got.as_ref().into(), root_hash) + .map_err(|_| format!("roots did not match got: {got:x?} expected: {root:x?}"))?; + Rfc6269Default::verify_inclusion( + leaf_index, + leaf_hash, + tree_size, + &proof_hashes, + root_hash, + ) + .map_err(|err| format!("{err:?}"))?; + + // returns Err if any probe is accepted + probes + .into_iter() + .map(|p| { + Rfc6269Default::verify_inclusion( + p.leaf_index, + (&p.leaf_hash).into(), + p.tree_size, + &p.proof.iter().map(|&h| h.into()).collect::>(), + (&p.root).into(), + ) + .err() + .ok_or(format!("accepted incorrect inclusion proof: {:?}", p.desc)) + }) + .collect::, _>>()?; + Ok(()) + } + + fn 
verifier_consistency_check( + size1: usize, + size2: usize, + proof: &[[u8; 32]], + root1: &[u8; 32], + root2: &[u8; 32], + ) -> Result<(), String> { + // Verify original consistency proof. + let proof_hashes = proof.iter().map(|&h| h.into()).collect::>(); + Rfc6269Default::verify_consistency(size1, size2, &proof_hashes, root1.into(), root2.into()) + .map_err(|err| format!("incorrectly rejected with {err:?}"))?; + // For simplicity test only non-trivial proofs that have root1 != root2, size1 != 0 and size1 != size2. + if proof.len() == 0 { + return Ok(()); + } + for (i, p) in corrupt_consistency_proof(size1, size2, root1, root2, proof) + .iter() + .enumerate() + { + Rfc6269Default::verify_consistency( + p.size1, + p.size2, + &p.proof.iter().map(|&h| h.into()).collect::>(), + p.root1.as_slice().into(), + p.root2.as_slice().into(), + ) + .err() + .ok_or(format!("[{i} incorrectly accepted: {:?}", p.desc))?; + } + + Ok(()) + } + + fn corrupt_consistency_proof<'a>( + size1: usize, + size2: usize, + root1: &'a [u8; 32], + root2: &'a [u8; 32], + proof: &[[u8; 32]], + ) -> Vec> { + let ln = proof.len(); + let mut ret = vec![ + // Wrong size1. + ConsistencyProbe { + size1: size1 - 1, + size2, + root1, + root2, + proof: proof.to_vec(), + desc: "size1 - 1", + }, + ConsistencyProbe { + size1: size1 + 1, + size2, + root1, + root2, + proof: proof.to_vec(), + desc: "size1 + 1", + }, + ConsistencyProbe { + size1: size1 ^ 2, + size2, + root1, + root2, + proof: proof.to_vec(), + desc: "size1 ^ 2", + }, + // Wrong tree height. + ConsistencyProbe { + size1, + size2: size2 * 2, + root1, + root2, + proof: proof.to_vec(), + desc: "size2 * 2", + }, + ConsistencyProbe { + size1, + size2: size2 / 2, + root1, + root2, + proof: proof.to_vec(), + desc: "size2 / 2", + }, + // Wrong root. 
+ ConsistencyProbe { + size1, + size2, + root1: &ZERO_HASH, + root2, + proof: proof.to_vec(), + desc: "wrong root1", + }, + ConsistencyProbe { + size1, + size2, + root1, + root2: &ZERO_HASH, + proof: proof.to_vec(), + desc: "wrong root2", + }, + ConsistencyProbe { + size1, + size2, + root1: root2, + root2: root1, + proof: proof.to_vec(), + desc: "swapped roots", + }, + // Empty proof. + ConsistencyProbe { + size1, + size2, + root1, + root2, + proof: vec![], + desc: "empty proof", + }, + // Add garbage at the end. + ConsistencyProbe { + size1, + size2, + root1, + root2, + proof: [proof, &[ZERO_HASH]].concat(), + desc: "trailing garbage", + }, + ConsistencyProbe { + size1, + size2, + root1, + root2, + proof: [proof, &[*root1]].concat(), + desc: "trailing root1", + }, + ConsistencyProbe { + size1, + size2, + root1, + root2, + proof: [proof, &[*root2]].concat(), + desc: "trailing root2", + }, + // Add garbage at the front. + ConsistencyProbe { + size1, + size2, + root1, + root2, + proof: [&[ZERO_HASH], proof].concat(), + desc: "preceding garbage", + }, + ConsistencyProbe { + size1, + size2, + root1, + root2, + proof: [&[*root1], proof].concat(), + desc: "preceding root1", + }, + ConsistencyProbe { + size1, + size2, + root1, + root2, + proof: [&[*root2], proof].concat(), + desc: "preceding root2", + }, + ConsistencyProbe { + size1, + size2, + root1, + root2, + proof: [&[proof[0]], proof].concat(), + desc: "preceding proof[0]", + }, + ]; + if ln > 0 { + ret.push(ConsistencyProbe { + size1, + size2, + root1, + root2, + proof: proof[..ln - 1].to_vec(), + desc: "truncated proof", + }); + } + // add probes with proves that have a flipped 4th bit of i-th byte of the i-th hash + ret.extend((0..ln).map(|i| { + let mut wrong_proof = proof.to_vec(); + wrong_proof[i][i] ^= 4; + ConsistencyProbe { + size1, + size2, + root1, + root2, + proof: wrong_proof, + desc: "proof with flipped bit", + } + })); + + return ret; + } + + #[test] + fn test_verify_inclusion_single_entry() { + let 
data = b"data"; + let hash = &Rfc6269Default::hash_leaf(data); + let proof = []; + let zero_hash = ZERO_HASH.as_slice().into(); + let test_cases = [ + (hash, hash, false), + (hash, zero_hash, true), + (zero_hash, hash, true), + ]; + for (i, (root, leaf, want_err)) in test_cases.into_iter().enumerate() { + let res = Rfc6269Default::verify_inclusion(0, leaf, 1, &proof, root); + assert_eq!( + res.is_err(), + want_err, + "unexpected inclusion proof result {res:?} for case {i:?}" + ) + } + } + + #[test] + fn test_verify_inclusion() { + let proof = []; + let probes = [(0, 0), (0, 1), (1, 0), (2, 1)]; + probes.into_iter().for_each(|(index, size)| { + let result = Rfc6269Default::verify_inclusion( + index, + SHA256_SOME_HASH.as_slice().into(), + size, + &proof, + ZERO_HASH.as_slice().into(), + ); + assert_eq!( + result.is_err(), + true, + "Incorrectly verified invalid root/leaf", + ); + let result = Rfc6269Default::verify_inclusion( + index, + ZERO_HASH.as_slice().into(), + size, + &proof, + SHA256_EMPTY_TREE_HASH.as_slice().into(), + ); + assert_eq!( + result.is_err(), + true, + "Incorrectly verified invalid root/leaf", + ); + let result = Rfc6269Default::verify_inclusion( + index, + SHA256_SOME_HASH.as_slice().into(), + size, + &proof, + SHA256_EMPTY_TREE_HASH.as_slice().into(), + ); + assert!(result.is_err(), "Incorrectly verified invalid root/leaf"); + }); + for i in 1..6 { + let p = &INCLUSION_PROOFS[i]; + let leaf_hash = &Rfc6269Default::hash_leaf(LEAVES[i]).into(); + let result = + verifier_check(p.leaf - 1, p.size, &p.proof, &ROOTS[p.size - 1], leaf_hash); + assert!(result.is_err(), "{result:?}") + } + } + + #[test] + fn test_verify_consistency() { + let root1 = &[0; 32].into(); + let root2 = &[1; 32].into(); + let proof1 = [].as_slice(); + let proof2 = [SHA256_EMPTY_TREE_HASH.into()]; + let empty_tree_hash = &SHA256_EMPTY_TREE_HASH.into(); + let test_cases = [ + (0, 0, root1, root2, proof1, true), + (1, 1, root1, root2, proof1, true), + // Sizes that are always 
consistent. + (0, 0, empty_tree_hash, empty_tree_hash, proof1, false), + (0, 1, empty_tree_hash, root2, proof1, false), + (1, 1, root2, root2, proof1, false), + // Time travel to the past. + (1, 0, root1, root2, proof1, true), + (2, 1, root1, root2, proof1, true), + // Empty proof. + (1, 2, root1, root2, proof1, true), + // Roots don't match. + (0, 0, empty_tree_hash, root2, proof1, true), + (1, 1, empty_tree_hash, root2, proof1, true), + // Roots match but the proof is not empty. + (0, 0, empty_tree_hash, empty_tree_hash, &proof2, true), + (0, 1, empty_tree_hash, empty_tree_hash, &proof2, true), + (1, 1, empty_tree_hash, empty_tree_hash, &proof2, true), + ]; + for (i, (size1, size2, root1, root2, proof, want_err)) in test_cases.into_iter().enumerate() + { + let res = Rfc6269Default::verify_consistency(size1, size2, proof, root1, root2); + assert_eq!( + res.is_err(), + want_err, + "unexpected proof result {res:?}, case {i}" + ); + } + + for (_, p) in CONSISTENCY_PROOFS.into_iter().enumerate() { + let result = verifier_consistency_check( + p.size1, + p.size2, + p.proof, + &ROOTS[p.size1 - 1], + &ROOTS[p.size2 - 1], + ); + assert!(result.is_ok(), "failed with error: {result:?}"); + } + } +} diff --git a/src/crypto/merkle/rfc6962.rs b/src/crypto/merkle/rfc6962.rs new file mode 100644 index 0000000000..1cd926b063 --- /dev/null +++ b/src/crypto/merkle/rfc6962.rs @@ -0,0 +1,111 @@ +use super::rfc6962::Rfc6269HashPrefix::{RFC6962LeafHashPrefix, RFC6962NodeHashPrefix}; +use digest::Output; +use sha2::{Digest, Sha256}; + +/// This is the prefix that gets added to the data before the hash is calculated. +#[repr(u8)] +enum Rfc6269HashPrefix { + RFC6962LeafHashPrefix = 0, + RFC6962NodeHashPrefix = 1, +} + +/// Trait that represents the [Merkle tree operations as defined in RFC6962](https://www.rfc-editor.org/rfc/rfc6962.html#section-2.1). +pub(crate) trait Rfc6269HasherTrait { + /// Hashing an empty root is equivalent to hashing an empty string. 
+ fn empty_root() -> O; + /// Leaf hashes are calculated the following way: `hash(0x00 || leaf)`. + fn hash_leaf(leaf: impl AsRef<[u8]>) -> O; + /// The hash of nodes with children is calculated recursively as: `hash(0x01 || left || right)`. + fn hash_children(left: impl AsRef<[u8]>, right: impl AsRef<[u8]>) -> O; +} + +impl Rfc6269HasherTrait> for T +where + T: Digest, +{ + fn empty_root() -> Output { + T::new().finalize() + } + fn hash_leaf(leaf: impl AsRef<[u8]>) -> Output { + T::new() + .chain_update([RFC6962LeafHashPrefix as u8]) + .chain_update(leaf) + .finalize() + } + fn hash_children(left: impl AsRef<[u8]>, right: impl AsRef<[u8]>) -> Output { + T::new() + .chain_update([RFC6962NodeHashPrefix as u8]) + .chain_update(left) + .chain_update(right) + .finalize() + } +} + +/// RFC6962 uses SHA-256 as the default hash-function. +pub(crate) type Rfc6269Default = Sha256; + +/// These tests were taken from the [transparency-dev Merkle implementation](https://github.com/transparency-dev/merkle/blob/036047b5d2f7faf3b1ee643d391e60fe5b1defcf/rfc6962/rfc6962_test.go). 
+#[cfg(test)] +mod test_rfc6962 { + use crate::crypto::merkle::rfc6962::Rfc6269HasherTrait; + use crate::crypto::merkle::Rfc6269Default; + use hex_literal::hex; + + #[derive(Debug, PartialEq)] + struct TestCase { + pub desc: String, + pub got: [u8; 32], + pub want: [u8; 32], + } + + #[test] + fn test_hasher() { + let leaf_hash = Rfc6269Default::hash_leaf(b"L123456"); + let empty_leaf_hash = Rfc6269Default::hash_leaf(b""); + let test_cases: Vec<_> = [ + TestCase { + desc: "RFC6962 Empty".to_string(), + want: hex!("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"), + got: Rfc6269Default::empty_root().into(), + }, + TestCase { + desc: "RFC6962 Empty Leaf".to_string(), + want: hex!("6e340b9cffb37a989ca544e6bb780a2c78901d3fb33738768511a30617afa01d"), + got: empty_leaf_hash.into(), + }, + TestCase { + desc: "RFC6962 Leaf".to_string(), + want: hex!("395aa064aa4c29f7010acfe3f25db9485bbd4b91897b6ad7ad547639252b4d56"), + got: leaf_hash.into(), + }, + TestCase { + desc: "RFC6962 Node".to_string(), + want: hex!("aa217fe888e47007fa15edab33c2b492a722cb106c64667fc2b044444de66bbb"), + got: Rfc6269Default::hash_children(b"N123", b"N456").into(), + }, + ] + .into_iter() + .filter(|tc| tc.got != tc.want) + .collect(); + assert_eq!(test_cases.len(), 0, "failed tests: {test_cases:?}") + } + + #[test] + fn test_collisions() { + let l1 = b"Hello".to_vec(); + let l2 = b"World".to_vec(); + let hash1 = Rfc6269Default::hash_leaf(&l1); + let hash2 = Rfc6269Default::hash_leaf(&l2); + assert_ne!(hash1, hash2, "got identical hashes for different leafs"); + + let sub_hash1 = Rfc6269Default::hash_children(&l1, &l2); + let sub_hash2 = Rfc6269Default::hash_children(&l2, &l1); + assert_ne!(sub_hash1, sub_hash2, "got same hash for different order"); + + let forged_hash = Rfc6269Default::hash_leaf(&[l1, l2].concat()); + assert_ne!( + sub_hash1, forged_hash, + "hasher is not second-preimage resistant" + ); + } +} diff --git a/src/crypto/mod.rs b/src/crypto/mod.rs index 
c8a35e22bf..a5e7c2fa2b 100644 --- a/src/crypto/mod.rs +++ b/src/crypto/mod.rs @@ -22,6 +22,7 @@ use crate::errors::*; pub use signing_key::SigStoreSigner; pub use verification_key::CosignVerificationKey; +pub(crate) mod merkle; /// Different digital signature algorithms. /// * `RSA_PSS_SHA256`: RSA PSS padding using SHA-256 diff --git a/src/errors.rs b/src/errors.rs index f513f17543..d3339b4ff3 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -21,6 +21,7 @@ use thiserror::Error; use crate::cosign::{ constraint::SignConstraintRefVec, verification_constraint::VerificationConstraintRefVec, }; +use crate::crypto::merkle::MerkleProofError; #[cfg(feature = "cosign")] #[derive(Error, Debug)] @@ -67,6 +68,9 @@ pub enum SigstoreError { #[error(transparent)] Base64DecodeError(#[from] base64::DecodeError), + #[error(transparent)] + HexDecodeError(#[from] hex::FromHexError), + #[error("Public key with unsupported algorithm: {0}")] PublicKeyUnsupportedAlgorithmError(String), @@ -106,6 +110,12 @@ pub enum SigstoreError { #[error("Certificate pool error: {0}")] CertificatePoolError(String), + #[error("Consistency proof error: {0:?}")] + ConsistencyProofError(MerkleProofError), + + #[error("Inclusion Proof error: {0:?}")] + InclusionProofError(MerkleProofError), + #[error("Cannot fetch manifest of {image}: {error}")] RegistryFetchManifestError { image: String, error: String }, diff --git a/src/rekor/models/checkpoint.rs b/src/rekor/models/checkpoint.rs new file mode 100644 index 0000000000..ec3593ef0f --- /dev/null +++ b/src/rekor/models/checkpoint.rs @@ -0,0 +1,422 @@ +use crate::crypto::{CosignVerificationKey, Signature}; +use crate::errors::SigstoreError; +use crate::rekor::models::checkpoint::ParseCheckpointError::*; +use base64::prelude::BASE64_STANDARD; +use base64::Engine; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use std::fmt::{Display, Formatter}; +use std::str::FromStr; + +/// A checkpoint (also known as a signed tree head) that served by the log. 
+/// It represents the log state at a point in time. +/// The `note` field stores this data, +/// and its authenticity can be verified with the data in `signature`. +#[derive(Debug, PartialEq, Clone, Eq)] +pub struct SignedCheckpoint { + pub note: CheckpointNote, + pub signature: CheckpointSignature, +} + +/// The metadata that is contained in a checkpoint. +#[derive(Debug, PartialEq, Clone, Eq)] +pub struct CheckpointNote { + /// origin is the unique identifier/version string + pub origin: String, + /// merkle tree size + pub size: u64, + /// merkle tree root hash + pub hash: [u8; 32], + /// catches the rest of the content + pub other_content: Vec, +} + +/// The signature that is contained in a checkpoint. +/// The `key_fingerprint` are the first four bytes of the key hash of the corresponding log public key. +/// This can be used to identity the key which should be used to verify the checkpoint. +/// The actual signature is stored in `raw`. +#[derive(Debug, PartialEq, Clone, Eq)] +pub struct CheckpointSignature { + pub key_fingerprint: [u8; 4], + pub raw: Vec, + pub name: String, +} + +/// Checkpoints can contain additional data. +/// The `KeyValue` variant is for lines that are in the format `: `. +/// Everything else is stored in the `Value` variant. 
+#[derive(Debug, PartialEq, Clone, Eq)] +pub enum OtherContent { + KeyValue(String, String), + Value(String), +} + +impl Display for OtherContent { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + OtherContent::KeyValue(k, v) => write!(f, "{k}: {v}"), + OtherContent::Value(v) => write!(f, "{v}"), + } + } +} + +#[derive(Debug, Eq, PartialEq)] +pub enum ParseCheckpointError { + DecodeError(String), +} + +impl FromStr for SignedCheckpoint { + type Err = ParseCheckpointError; + + fn from_str(s: &str) -> Result { + // refer to: https://github.com/sigstore/rekor/blob/d702f84e6b8b127662c5e717ee550de1242a6aec/pkg/util/checkpoint.go + + let checkpoint = s.trim_start_matches('"').trim_end_matches('"'); + + let Some((note, signature)) = checkpoint.split_once("\n\n") else { + return Err(DecodeError("unexpected checkpoint format".to_string())); + }; + + let signature = signature.parse()?; + let note = CheckpointNote::unmarshal(note)?; + + Ok(SignedCheckpoint { note, signature }) + } +} + +impl CheckpointNote { + // Output is the part of the checkpoint that is signed. + fn marshal(&self) -> String { + let hash_b64 = BASE64_STANDARD.encode(self.hash); + let other_content: String = self + .other_content + .iter() + .map(|c| format!("{c}\n")) + .collect(); + format!( + "{}\n{}\n{hash_b64}\n{other_content}", + self.origin, self.size + ) + } + fn unmarshal(s: &str) -> Result { + // refer to: https://github.com/sigstore/rekor/blob/d702f84e6b8b127662c5e717ee550de1242a6aec/pkg/util/checkpoint.go + // note is separated by new lines + let split_note = s.split('\n').collect::>(); + let [origin, size, hash_b64, other_content @ ..] 
= split_note.as_slice() else { + return Err(DecodeError("note not in expected format".to_string())); + }; + + let size = size + .parse() + .map_err(|_| DecodeError("expected decimal string for size".into()))?; + + let hash = BASE64_STANDARD + .decode(hash_b64) + .map_err(|_| DecodeError("failed to decode root hash".to_string())) + .and_then(|v| { + <[u8; 32]>::try_from(v) + .map_err(|_| DecodeError("expected 32-byte hash".to_string())) + })?; + + let other_content = other_content + .iter() + .filter(|s| !s.is_empty()) + .map(|s| { + s.split_once(": ") + .map(|(k, v)| OtherContent::KeyValue(k.to_string(), v.to_string())) + .unwrap_or(OtherContent::Value(s.to_string())) + }) + .collect(); + + Ok(CheckpointNote { + origin: origin.to_string(), + size, + hash, + other_content, + }) + } +} + +impl ToString for SignedCheckpoint { + fn to_string(&self) -> String { + let note = self.note.marshal(); + let signature = self.signature.to_string(); + format!("{note}\n{signature}") + } +} + +impl SignedCheckpoint { + /// This method can be used to verify that the checkpoint was issued by the log with the + /// public key `rekor_key`. 
+ pub fn verify_signature(&self, rekor_key: &CosignVerificationKey) -> Result<(), SigstoreError> { + rekor_key.verify_signature( + Signature::Raw(&self.signature.raw), + self.note.marshal().as_bytes(), + ) + } +} + +impl Serialize for SignedCheckpoint { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + self.to_string().serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for SignedCheckpoint { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + <String>::deserialize(deserializer).and_then(|s| { + SignedCheckpoint::from_str(&s).map_err(|DecodeError(err)| serde::de::Error::custom(err)) + }) + } +} + +impl ToString for CheckpointSignature { + fn to_string(&self) -> String { + let sig_b64 = + BASE64_STANDARD.encode([self.key_fingerprint.as_slice(), self.raw.as_slice()].concat()); + format!("— {} {sig_b64}\n", self.name) + } +} + +impl FromStr for CheckpointSignature { + type Err = ParseCheckpointError; + fn from_str(s: &str) -> Result<Self, Self::Err> { + let s = s.trim_start_matches('\n').trim_end_matches('\n'); + let [_, name, sig_b64] = s.split(' ').collect::<Vec<_>>()[..] 
else { + return Err(DecodeError(format!("unexpected signature format {s:?}"))); + }; + let sig = BASE64_STANDARD + .decode(sig_b64.trim_end()) + .map_err(|_| DecodeError("failed to decode signature".to_string()))?; + + // first four bytes of signature are fingerprint of key + let (key_fingerprint, sig) = sig.split_at(4); + let key_fingerprint = key_fingerprint + .try_into() + .map_err(|_| DecodeError("unexpected signature length in checkpoint".to_string()))?; + + Ok(CheckpointSignature { + key_fingerprint, + name: name.to_string(), + raw: sig.to_vec(), + }) + } +} + +#[cfg(test)] +mod test { + #[cfg(test)] + mod test_checkpoint_note { + use crate::rekor::models::checkpoint::CheckpointNote; + use crate::rekor::models::checkpoint::OtherContent::{KeyValue, Value}; + + #[test] + fn test_marshal() { + let test_cases = [ + ( + "Log Checkpoint v0", + 123, + [0; 32], + vec![], + "Log Checkpoint v0\n123\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=\n", + ), + ( + "Banana Checkpoint v5", + 9944, + [1; 32], + vec![], + "Banana Checkpoint v5\n9944\nAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE=\n", ), + ( + "Banana Checkpoint v7", + 9943, + [2; 32], + vec![Value("foo".to_string()), Value("bar".to_string())], + "Banana Checkpoint v7\n9943\nAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI=\nfoo\nbar\n", + ), + ]; + + for (origin, size, hash, other_content, expected) in test_cases { + assert_eq!( + CheckpointNote { + size, + origin: origin.to_string(), + hash, + other_content, + } + .marshal(), + expected + ); + } + } + + #[test] + fn test_unmarshal_valid() { + let test_cases = [ + ( + "valid", + "Log Checkpoint v0", + 123, + [0; 32], + vec![], + "Log Checkpoint v0\n123\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=\n", + ), + ( + "valid", + "Banana Checkpoint v5", + 9944, + [1; 32], + vec![], + "Banana Checkpoint v5\n9944\nAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE=\n", ), + ( + "valid with multiple trailing data lines", + "Banana Checkpoint v7", + 9943, + [2; 32], + 
vec![Value("foo".to_string()), Value("bar".to_string())], + "Banana Checkpoint v7\n9943\nAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI=\nfoo\nbar\n", + ), + ( + "valid with key-value data line", + "Banana Checkpoint v7", + 9943, + [2; 32], + vec![KeyValue("Timestamp".to_string(), "1689748607742585419".to_string())], + "Banana Checkpoint v7\n9943\nAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI=\nTimestamp: 1689748607742585419\n", + ), + ( + "valid with trailing newlines", + "Banana Checkpoint v7", + 9943, + [2; 32], + vec![], + "Banana Checkpoint v7\n9943\nAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgI=\n\n\n\n", + ), + ]; + + for (desc, origin, size, hash, other_content, input) in test_cases { + let got = CheckpointNote::unmarshal(input); + + let expected = CheckpointNote { + size, + origin: origin.to_string(), + hash, + other_content, + }; + assert_eq!(got, Ok(expected), "failed test case: {desc}"); + } + } + + #[test] + fn test_unmarshal_invalid() { + let test_cases = [( + "invalid - insufficient lines", + "Head\n9944\n", + ), ( + "invalid - empty header", + "\n9944\ndGhlIHZpZXcgZnJvbSB0aGUgdHJlZSB0b3BzIGlzIGdyZWF0IQ==\n", + ), ( + "invalid - missing newline on roothash", + "Log Checkpoint v0\n123\nYmFuYW5hcw==", + ), ( + "invalid size - not a number", + "Log Checkpoint v0\nbananas\ndGhlIHZpZXcgZnJvbSB0aGUgdHJlZSB0b3BzIGlzIGdyZWF0IQ==\n", + ), ( + "invalid size - negative", + "Log Checkpoint v0\n-34\ndGhlIHZpZXcgZnJvbSB0aGUgdHJlZSB0b3BzIGlzIGdyZWF0IQ==\n", + ), + ( + "invalid size - too large", + "Log Checkpoint v0\n3438945738945739845734895735\ndGhlIHZpZXcgZnJvbSB0aGUgdHJlZSB0b3BzIGlzIGdyZWF0IQ==\n", + ), + ( + "invalid roothash - not base64", + "Log Checkpoint v0\n123\nThisIsn'tBase64\n", + ), + ]; + for (desc, data) in test_cases { + assert!( + CheckpointNote::unmarshal(data).is_err(), + "accepted invalid note: {desc}" + ); + } + } + } + + #[cfg(test)] + mod test_checkpoint_signature { + use crate::rekor::models::checkpoint::CheckpointSignature; + use 
std::str::FromStr; + + #[test] + fn test_to_string_valid_with_url_name() { + let got = CheckpointSignature { + name: "log.example.org".to_string(), + key_fingerprint: [0; 4], + raw: vec![1; 32], + } + .to_string(); + let expected = "— log.example.org AAAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB\n"; + assert_eq!(got, expected) + } + + #[test] + fn test_to_string_valid_with_id_name() { + let got = CheckpointSignature { + name: "815f6c60aab9".to_string(), + key_fingerprint: [0; 4], + raw: vec![1; 32], + } + .to_string(); + let expected = "— 815f6c60aab9 AAAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB\n"; + assert_eq!(got, expected) + } + + #[test] + fn test_from_str_valid_with_url_name() { + let input = "— log.example.org AAAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB\n"; + let expected = CheckpointSignature { + name: "log.example.org".to_string(), + key_fingerprint: [0; 4], + raw: vec![1; 32], + }; + let got = CheckpointSignature::from_str(input); + assert_eq!(got, Ok(expected)) + } + + #[test] + fn test_from_str_valid_with_id_name() { + let input = "— 815f6c60aab9 AAAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB\n"; + let expected = CheckpointSignature { + name: "815f6c60aab9".to_string(), + key_fingerprint: [0; 4], + raw: vec![1; 32], + }; + let got = CheckpointSignature::from_str(input); + assert_eq!(got, Ok(expected)) + } + + #[test] + fn test_from_str_valid_with_whitespace() { + let input = "\n— log.example.org AAAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB\n\n"; + let expected = CheckpointSignature { + name: "log.example.org".to_string(), + key_fingerprint: [0; 4], + raw: vec![1; 32], + }; + let got = CheckpointSignature::from_str(input); + assert_eq!(got, Ok(expected)) + } + + #[test] + fn test_from_str_invalid_with_spaces_in_name() { + let input = "— Foo Bar AAAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB\n"; + let got = CheckpointSignature::from_str(input); + assert!(got.is_err()) + } + } +} diff --git a/src/rekor/models/consistency_proof.rs 
b/src/rekor/models/consistency_proof.rs index f819675eed..5ab351449a 100644 --- a/src/rekor/models/consistency_proof.rs +++ b/src/rekor/models/consistency_proof.rs @@ -8,6 +8,8 @@ * Generated by: https://openapi-generator.tech */ +use crate::errors::SigstoreError; +use crate::errors::SigstoreError::{ConsistencyProofError, UnexpectedError}; use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, Eq, Default, Serialize, Deserialize)] @@ -23,4 +25,45 @@ impl ConsistencyProof { pub fn new(root_hash: String, hashes: Vec<String>) -> ConsistencyProof { ConsistencyProof { root_hash, hashes } } + + pub fn verify( + &self, + old_size: usize, + old_root: &str, + new_size: usize, + ) -> Result<(), SigstoreError> { + use crate::crypto::merkle::{MerkleProofVerifier, Rfc6269Default}; + + // decode hashes from hex and convert them to the required data structure + // immediately return an error when conversion fails + let proof_hashes = self + .hashes + .iter() + .map(|h| { + hex::decode(h) + .map_err(Into::into) // failed to decode from hex + .and_then(|h| { + <[u8; 32]>::try_from(h).map_err(|err| UnexpectedError(format!("{err:?}"))) + }) + .map(Into::into) + }) + .collect::<Result<Vec<_>, _>>()?; + + let old_root = hex::decode(old_root) + .map_err(Into::into) + .and_then(|h| { + <[u8; 32]>::try_from(h).map_err(|err| UnexpectedError(format!("{err:?}"))) + }) + .map(Into::into)?; + + let new_root = hex::decode(&self.root_hash) + .map_err(Into::into) + .and_then(|h| { + <[u8; 32]>::try_from(h).map_err(|err| UnexpectedError(format!("{err:?}"))) + }) + .map(Into::into)?; + + Rfc6269Default::verify_consistency(old_size, new_size, &proof_hashes, &old_root, &new_root) + .map_err(ConsistencyProofError) + } } diff --git a/src/rekor/models/inclusion_proof.rs b/src/rekor/models/inclusion_proof.rs index 3dc6653a14..fdf1cc425f 100644 --- a/src/rekor/models/inclusion_proof.rs +++ b/src/rekor/models/inclusion_proof.rs @@ -8,6 +8,11 @@ * Generated by: https://openapi-generator.tech */ +use 
crate::crypto::merkle::{MerkleProofVerifier, Rfc6269Default, Rfc6269HasherTrait}; +use crate::crypto::CosignVerificationKey; +use crate::errors::SigstoreError; +use crate::errors::SigstoreError::{InclusionProofError, UnexpectedError}; +use crate::rekor::models::checkpoint::{CheckpointNote, SignedCheckpoint}; use crate::rekor::TreeSize; use serde::{Deserialize, Serialize}; @@ -25,6 +30,7 @@ pub struct InclusionProof { /// A list of hashes required to compute the inclusion proof, sorted in order from leaf to root #[serde(rename = "hashes")] pub hashes: Vec<String>, + pub checkpoint: Option<SignedCheckpoint>, } impl InclusionProof { @@ -33,12 +39,77 @@ impl InclusionProof { root_hash: String, tree_size: TreeSize, hashes: Vec<String>, + checkpoint: Option<SignedCheckpoint>, ) -> InclusionProof { InclusionProof { log_index, root_hash, tree_size, hashes, + checkpoint, } } + + /// Verify that the canonically encoded `entry` is included in the log, + /// and the included checkpoint was signed by the log. + pub fn verify( + &self, + entry: &[u8], + rekor_key: &CosignVerificationKey, + ) -> Result<(), SigstoreError> { + // enforce that there is a checkpoint + let checkpoint = self.checkpoint.as_ref().ok_or(UnexpectedError( + "inclusion proof misses checkpoint".to_string(), + ))?; + + // make sure we don't just accept any random checkpoint + self.verify_checkpoint_sanity(&checkpoint.note)?; + + // verify the checkpoint signature + checkpoint.verify_signature(rekor_key)?; + + let entry_hash = Rfc6269Default::hash_leaf(entry); + + // decode hashes from hex and convert them to the required data structure + // immediately return an error when conversion fails + let proof_hashes = self + .hashes + .iter() + .map(|h| { + hex::decode(h) + .map_err(Into::into) + .and_then(|h| { + <[u8; 32]>::try_from(h).map_err(|err| UnexpectedError(format!("{err:?}"))) + }) + .map(Into::into) + }) + .collect::<Result<Vec<_>, _>>()?; + + let entry_hash = hex::decode(entry_hash) + .map_err(Into::into) + .and_then(|h| { + <[u8; 32]>::try_from(h).map_err(|err| 
UnexpectedError(format!("{err:?}"))) + }) + .map(Into::into)?; + let root_hash = hex::decode(&self.root_hash) + .map_err(Into::into) + .and_then(|h| { + <[u8; 32]>::try_from(h).map_err(|err| UnexpectedError(format!("{err:?}"))) + }) + .map(Into::into)?; + + Rfc6269Default::verify_inclusion( + self.log_index as usize, + &entry_hash, + self.tree_size as usize, + &proof_hashes, + &root_hash, + ) + .map_err(InclusionProofError) + } + + /// verify that the checkpoint actually can be used to verify this inclusion proof + fn verify_checkpoint_sanity(&self, _note: &CheckpointNote) -> Result<(), SigstoreError> { + todo!() + } } diff --git a/src/rekor/models/log_entry.rs b/src/rekor/models/log_entry.rs index b3b86f2fef..6d54660231 100644 --- a/src/rekor/models/log_entry.rs +++ b/src/rekor/models/log_entry.rs @@ -1,7 +1,10 @@ use crate::errors::SigstoreError; -use crate::rekor::TreeSize; use base64::{engine::general_purpose::STANDARD as BASE64_STD_ENGINE, Engine as _}; +use crate::crypto::CosignVerificationKey; +use crate::errors::SigstoreError::UnexpectedError; +use crate::rekor::models::InclusionProof; +use olpc_cjson::CanonicalFormatter; use serde::{Deserialize, Serialize}; use serde_json::{json, Error, Value}; use std::collections::HashMap; @@ -87,12 +90,21 @@ pub struct Verification { pub signed_entry_timestamp: String, } -/// Stores the signature over the artifact's logID, logIndex, body and integratedTime. 
-#[derive(Default, Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct InclusionProof { - pub hashes: Vec<String>, - pub log_index: i64, - pub root_hash: String, - pub tree_size: TreeSize, +impl LogEntry { + pub fn verify_inclusion(&self, rekor_key: &CosignVerificationKey) -> Result<(), SigstoreError> { + self.verification + .inclusion_proof + .as_ref() + .ok_or(UnexpectedError("missing inclusion proof".to_string())) + .and_then(|proof| { + // encode as canonical JSON + let mut encoded_entry = Vec::new(); + let mut ser = serde_json::Serializer::with_formatter( + &mut encoded_entry, + CanonicalFormatter::new(), + ); + self.serialize(&mut ser)?; + proof.verify(&encoded_entry, rekor_key) + }) + } } diff --git a/src/rekor/models/log_info.rs b/src/rekor/models/log_info.rs index 12e2ce72b7..6697457df3 100644 --- a/src/rekor/models/log_info.rs +++ b/src/rekor/models/log_info.rs @@ -8,10 +8,11 @@ * Generated by: https://openapi-generator.tech */ +use crate::rekor::models::checkpoint::SignedCheckpoint; use crate::rekor::TreeSize; use serde::{Deserialize, Serialize}; -#[derive(Clone, Debug, PartialEq, Eq, Default, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct LogInfo { /// The current hash value stored at the root of the merkle tree #[serde(rename = "rootHash")] @@ -21,7 +22,7 @@ pub struct LogInfo { pub tree_size: TreeSize, /// The current signed tree head #[serde(rename = "signedTreeHead")] - pub signed_tree_head: String, + pub signed_tree_head: SignedCheckpoint, /// The current treeID #[serde(rename = "treeID")] pub tree_id: Option<String>, @@ -30,7 +31,11 @@ pub struct LogInfo { } impl LogInfo { - pub fn new(root_hash: String, tree_size: TreeSize, signed_tree_head: String) -> LogInfo { + pub fn new( + root_hash: String, + tree_size: TreeSize, + signed_tree_head: SignedCheckpoint, + ) -> LogInfo { LogInfo { root_hash, tree_size, diff --git a/src/rekor/models/mod.rs 
b/src/rekor/models/mod.rs index 67ba961315..bd44837e77 100644 --- a/src/rekor/models/mod.rs +++ b/src/rekor/models/mod.rs @@ -52,5 +52,6 @@ pub mod tuf; pub use self::tuf::Tuf; pub mod tuf_all_of; pub use self::tuf_all_of::TufAllOf; +pub mod checkpoint; pub mod log_entry; pub use self::log_entry::LogEntry;