diff --git a/applications/tari_swarm_daemon/src/main.rs b/applications/tari_swarm_daemon/src/main.rs
index bee6c228d..8a3eed608 100644
--- a/applications/tari_swarm_daemon/src/main.rs
+++ b/applications/tari_swarm_daemon/src/main.rs
@@ -197,7 +197,8 @@ async fn start(cli: &Cli) -> anyhow::Result<()> {
     let lock_file = config.base_dir.join("tari_swarm.pid");
     let _pid = lockfile::Lockfile::create(&lock_file).with_context(|| {
         anyhow!(
-            "Failed to acquire lockfile at {}. Is another instance already running?",
+            "Failed to acquire lockfile at '{}'. Is another instance already running? If not, the swarm daemon may \
+             have previously crashed and you can safely remove the lockfile.",
             lock_file.display()
         )
     })?;
diff --git a/dan_layer/rpc_state_sync/src/manager.rs b/dan_layer/rpc_state_sync/src/manager.rs
index d81ac7009..21e16773d 100644
--- a/dan_layer/rpc_state_sync/src/manager.rs
+++ b/dan_layer/rpc_state_sync/src/manager.rs
@@ -207,11 +207,11 @@ where TConsensusSpec: ConsensusSpec
             let change = match &transition.update {
                 SubstateUpdate::Create(create) => SubstateTreeChange::Up {
-                    id: create.substate.substate_id.clone(),
+                    id: create.substate.to_versioned_substate_id(),
                     value_hash: hash_substate(&create.substate.substate_value, create.substate.version),
                 },
                 SubstateUpdate::Destroy(destroy) => SubstateTreeChange::Down {
-                    id: destroy.substate_id.clone(),
+                    id: destroy.to_versioned_substate_id(),
                 },
             };
diff --git a/dan_layer/state_tree/src/bit_iter.rs b/dan_layer/state_tree/src/bit_iter.rs
new file mode 100644
index 000000000..bd77b2b89
--- /dev/null
+++ b/dan_layer/state_tree/src/bit_iter.rs
@@ -0,0 +1,54 @@
+// Copyright 2024 The Tari Project
+// SPDX-License-Identifier: BSD-3-Clause
+
+use std::ops::Range;
+
+/// An iterator over a byte slice (e.g. a hash value) that yields one bit per iteration.
+pub struct BitIterator<'a> {
+    /// The reference to the bytes being iterated over.
+    bytes: &'a [u8],
+    pos: Range<usize>,
+    // invariant pos.end == bytes.len() * 8;
+}
+
+impl<'a> BitIterator<'a> {
+    /// Constructs a new `BitIterator` over the given bytes.
+    pub fn new(bytes: &'a [u8]) -> Self {
+        BitIterator {
+            bytes,
+            pos: 0..bytes.len() * 8,
+        }
+    }
+
+    /// Returns the `index`-th bit in the bytes.
+    fn get_bit(&self, index: usize) -> bool {
+        // MIRAI annotations retained from the upstream implementation:
+        // assume!(index < self.pos.end); // assumed precondition
+        // assume!(self.bytes.len() == 32); // invariant
+        // assume!(self.pos.end == self.bytes.len() * 8); // invariant
+        let pos = index / 8;
+        let bit = 7 - index % 8;
+        (self.bytes[pos] >> bit) & 1 != 0
+    }
+}
+
+impl<'a> Iterator for BitIterator<'a> {
+    type Item = bool;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.pos.next().map(|x| self.get_bit(x))
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.pos.size_hint()
+    }
+}
+
+impl<'a> DoubleEndedIterator for BitIterator<'a> {
+    fn next_back(&mut self) -> Option<Self::Item> {
+        self.pos.next_back().map(|x| self.get_bit(x))
+    }
+}
+
+impl<'a> ExactSizeIterator for BitIterator<'a> {}
diff --git a/dan_layer/state_tree/src/jellyfish/error.rs b/dan_layer/state_tree/src/jellyfish/error.rs
new file mode 100644
index 000000000..7f1f53011
--- /dev/null
+++ b/dan_layer/state_tree/src/jellyfish/error.rs
@@ -0,0 +1,30 @@
+// Copyright 2024 The Tari Project
+// SPDX-License-Identifier: BSD-3-Clause
+
+use crate::{Hash, LeafKey};
+
+#[derive(Debug, thiserror::Error)]
+pub enum JmtProofVerifyError {
+    #[error("Sparse Merkle Tree proof has more than 256 ({num_siblings}) siblings.")]
+    TooManySiblings { num_siblings: usize },
+    #[error("Keys do not match. Key in proof: {actual_key}. Expected key: {expected_key}.")]
+    KeyMismatch { actual_key: LeafKey, expected_key: LeafKey },
+    #[error("Value hashes do not match. Value hash in proof: {actual}. Expected value hash: {expected}.")]
+    ValueMismatch { actual: Hash, expected: Hash },
+    #[error("Expected inclusion proof. Found non-inclusion proof.")]
+    ExpectedInclusionProof,
+    #[error("Expected non-inclusion proof, but key exists in proof.")]
+    ExpectedNonInclusionProof,
+    #[error(
+        "The key, if it existed, would not fall under the subtree in which the key provided in the proof is the \
+         only existing key, so this is not a valid non-inclusion proof."
+    )]
+    InvalidNonInclusionProof,
+    #[error(
+        "Root hashes do not match. Actual root hash: {actual_root_hash}. Expected root hash: {expected_root_hash}."
+    )]
+    RootHashMismatch {
+        actual_root_hash: Hash,
+        expected_root_hash: Hash,
+    },
+}
diff --git a/dan_layer/state_tree/src/jellyfish/mod.rs b/dan_layer/state_tree/src/jellyfish/mod.rs
index 1c6a6d89a..54b5754c4 100644
--- a/dan_layer/state_tree/src/jellyfish/mod.rs
+++ b/dan_layer/state_tree/src/jellyfish/mod.rs
@@ -10,6 +10,7 @@ pub use tree::*;
 mod types;
 pub use types::*;
 
+mod error;
 mod store;
 pub use store::*;
diff --git a/dan_layer/state_tree/src/jellyfish/tree.rs b/dan_layer/state_tree/src/jellyfish/tree.rs
index 188da6636..2636d32e5 100644
--- a/dan_layer/state_tree/src/jellyfish/tree.rs
+++ b/dan_layer/state_tree/src/jellyfish/tree.rs
@@ -375,7 +375,7 @@ impl<'a, R: 'a + TreeStoreReader<P>, P: Clone> JellyfishMerkleTree<'a, R, P> {
         if kvs.len() == 1 && kvs[0].0 == existing_leaf_key {
             if let (key, Some((value_hash, payload))) = kvs[0] {
-                let new_leaf_node = Node::new_leaf(key.clone(), *value_hash, payload.clone(), version);
+                let new_leaf_node = Node::new_leaf(*key, *value_hash, payload.clone(), version);
                 Ok(Some(new_leaf_node))
             } else {
                 Ok(None)
@@ -454,7 +454,7 @@ impl<'a, R: 'a + TreeStoreReader<P>, P: Clone> JellyfishMerkleTree<'a, R, P> {
     ) -> Result<Option<Node<P>>, JmtStorageError> {
         if kvs.len() == 1 {
             if let (key, Some((value_hash, payload))) = kvs[0] {
-                let new_leaf_node = Node::new_leaf(key.clone(), *value_hash, payload.clone(), version);
+                let new_leaf_node = Node::new_leaf(*key, *value_hash, payload.clone(), version);
                 Ok(Some(new_leaf_node))
             } else {
                 Ok(None)
diff --git a/dan_layer/state_tree/src/jellyfish/types.rs b/dan_layer/state_tree/src/jellyfish/types.rs
index 12a41db04..07cc644b6 100644
--- a/dan_layer/state_tree/src/jellyfish/types.rs
+++ b/dan_layer/state_tree/src/jellyfish/types.rs
@@ -81,7 +81,7 @@
 // Copyright (c) Aptos
 // SPDX-License-Identifier: Apache-2.0
 
-use std::{fmt, io, ops::Range};
+use std::{fmt, fmt::Display, io, ops::Range};
 
 use blake2::{digest::consts::U32, Blake2b};
 use indexmap::IndexMap;
@@ -94,7 +94,10 @@ use tari_crypto::{
 use tari_dan_common_types::optional::IsNotFoundError;
 use tari_engine_types::serde_with;
 
-use crate::jellyfish::store::TreeStoreReader;
+use crate::{
+    bit_iter::BitIterator,
+    jellyfish::{error::JmtProofVerifyError, store::TreeStoreReader},
+};
 
 pub type Hash = tari_common_types::types::FixedHash;
 
@@ -146,7 +149,7 @@ pub struct SparseMerkleProofExt {
 
 impl SparseMerkleProofExt {
     /// Constructs a new `SparseMerkleProofExt` using leaf and a list of sibling nodes.
-    pub fn new(leaf: Option<SparseMerkleLeafNode>, siblings: Vec<NodeInProof>) -> Self {
+    pub(crate) fn new(leaf: Option<SparseMerkleLeafNode>, siblings: Vec<NodeInProof>) -> Self {
         Self { leaf, siblings }
     }
 
@@ -159,6 +162,106 @@ impl SparseMerkleProofExt {
     pub fn siblings(&self) -> &[NodeInProof] {
         &self.siblings
     }
+
+    /// Verifies that an element whose key is `element_key` and whose value hashes to `element_value_hash` exists in
+    /// the Sparse Merkle Tree, using the provided proof.
+    pub fn verify_inclusion(
+        &self,
+        expected_root_hash: &Hash,
+        element_key: &LeafKey,
+        element_value_hash: &Hash,
+    ) -> Result<(), JmtProofVerifyError> {
+        self.verify(expected_root_hash, element_key, Some(element_value_hash))
+    }
+
+    /// Verifies that the proof is a valid non-inclusion proof showing that this key does not exist in the tree.
+    pub fn verify_exclusion(
+        &self,
+        expected_root_hash: &Hash,
+        element_key: &LeafKey,
+    ) -> Result<(), JmtProofVerifyError> {
+        self.verify(expected_root_hash, element_key, None)
+    }
+
+    /// If `element_value` is present, verifies an element whose key is `element_key` and value is
+    /// `element_value` exists in the Sparse Merkle Tree using the provided proof. Otherwise,
+    /// verifies the proof is a valid non-inclusion proof that shows this key doesn't exist in the
+    /// tree.
+    fn verify(
+        &self,
+        expected_root_hash: &Hash,
+        element_key: &LeafKey,
+        element_value: Option<&Hash>,
+    ) -> Result<(), JmtProofVerifyError> {
+        if self.siblings.len() > 256 {
+            return Err(JmtProofVerifyError::TooManySiblings {
+                num_siblings: self.siblings.len(),
+            });
+        }
+
+        match (element_value, &self.leaf) {
+            (Some(value_hash), Some(leaf)) => {
+                // This is an inclusion proof, so the key and value hash provided in the proof
+                // should match element_key and element_value_hash. `siblings` should prove the
+                // route from the leaf node to the root.
+                if element_key != leaf.key() {
+                    return Err(JmtProofVerifyError::KeyMismatch {
+                        actual_key: *leaf.key(),
+                        expected_key: *element_key,
+                    });
+                }
+                if *value_hash != leaf.value_hash {
+                    return Err(JmtProofVerifyError::ValueMismatch {
+                        actual: leaf.value_hash,
+                        expected: *value_hash,
+                    });
+                }
+            },
+            (Some(_), None) => return Err(JmtProofVerifyError::ExpectedInclusionProof),
+            (None, Some(leaf)) => {
+                // This is a non-inclusion proof. The proof intends to show that if a leaf node
+                // representing `element_key` is inserted, it will break a currently existing leaf
+                // node represented by `proof_key` into a branch. `siblings` should prove the
+                // route from that leaf node to the root.
+                if element_key == leaf.key() {
+                    return Err(JmtProofVerifyError::ExpectedNonInclusionProof);
+                }
+                if element_key.common_prefix_bits_len(leaf.key()) < self.siblings.len() {
+                    return Err(JmtProofVerifyError::InvalidNonInclusionProof);
+                }
+            },
+            (None, None) => {
+                // This is a non-inclusion proof. The proof intends to show that if a leaf node
+                // representing `element_key` is inserted, it will show up at a currently empty
+                // position. `siblings` should prove the route from this empty position to the root.
+            },
+        }
+
+        let current_hash = self
+            .leaf
+            .clone()
+            .map_or(SPARSE_MERKLE_PLACEHOLDER_HASH, |leaf| leaf.hash());
+        let actual_root_hash = self
+            .siblings
+            .iter()
+            .zip(element_key.iter_bits().rev().skip(256 - self.siblings.len()))
+            .fold(current_hash, |hash, (sibling_node, bit)| {
+                if bit {
+                    SparseMerkleInternalNode::new(sibling_node.hash(), hash).hash()
+                } else {
+                    SparseMerkleInternalNode::new(hash, sibling_node.hash()).hash()
+                }
+            });
+
+        if actual_root_hash != *expected_root_hash {
+            return Err(JmtProofVerifyError::RootHashMismatch {
+                actual_root_hash,
+                expected_root_hash: *expected_root_hash,
+            });
+        }
+
+        Ok(())
+    }
 }
 
 impl From<SparseMerkleProofExt> for SparseMerkleProof {
@@ -552,8 +655,8 @@ impl NibblePath {
     }
 
     /// Get a bit iterator iterates over the whole nibble path.
-    pub fn bits(&self) -> BitIterator {
-        BitIterator {
+    pub fn bits(&self) -> NibbleBitIterator {
+        NibbleBitIterator {
             nibble_path: self,
             pos: (0..self.num_nibbles * 4),
         }
@@ -599,12 +702,12 @@ pub trait Peekable: Iterator {
 }
 
 /// BitIterator iterates a nibble path by bit.
-pub struct BitIterator<'a> {
+pub struct NibbleBitIterator<'a> {
     nibble_path: &'a NibblePath,
     pos: Range<usize>,
 }
 
-impl<'a> Peekable for BitIterator<'a> {
+impl<'a> Peekable for NibbleBitIterator<'a> {
     /// Returns the `next()` value without advancing the iterator.
     fn peek(&self) -> Option<Self::Item> {
         if self.pos.start < self.pos.end {
@@ -616,7 +719,7 @@ impl<'a> Peekable for BitIterator<'a> {
 }
 
 /// BitIterator spits out a boolean each time. True/false denotes 1/0.
-impl<'a> Iterator for BitIterator<'a> {
+impl<'a> Iterator for NibbleBitIterator<'a> {
     type Item = bool;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -625,7 +728,7 @@ impl<'a> Iterator for BitIterator<'a> {
 }
 
 /// Support iterating bits in reversed order.
-impl<'a> DoubleEndedIterator for BitIterator<'a> {
+impl<'a> DoubleEndedIterator for NibbleBitIterator<'a> {
     fn next_back(&mut self) -> Option<Self::Item> {
         self.pos.next_back().map(|i| self.nibble_path.get_bit(i))
     }
@@ -690,8 +793,8 @@ impl<'a> NibbleIterator<'a> {
     }
 
     /// Turn it into a `BitIterator`.
-    pub fn bits(&self) -> BitIterator<'a> {
-        BitIterator {
+    pub fn bits(&self) -> NibbleBitIterator<'a> {
+        NibbleBitIterator {
             nibble_path: self.nibble_path,
             pos: (self.pos.start * 4..self.pos.end * 4),
         }
@@ -717,7 +820,7 @@ impl<'a> NibbleIterator<'a> {
 
 // INITIAL-MODIFICATION: We will use this type (instead of `Hash`) to allow for arbitrary key length
 /// A leaf key (i.e. a complete nibble path).
-#[derive(Clone, Debug, Hash, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
+#[derive(Clone, Debug, Copy, Hash, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
 pub struct LeafKey {
     /// The underlying bytes.
     /// All leaf keys of the same tree must be of the same length - otherwise the tree's behavior
@@ -737,6 +840,23 @@ impl LeafKey {
     pub fn as_ref(&self) -> LeafKeyRef<'_> {
         LeafKeyRef::new(self.bytes.as_slice())
     }
+
+    pub fn iter_bits(&self) -> BitIterator<'_> {
+        BitIterator::new(self.bytes.as_slice())
+    }
+
+    pub fn common_prefix_bits_len(&self, other: &LeafKey) -> usize {
+        self.iter_bits()
+            .zip(other.iter_bits())
+            .take_while(|(x, y)| x == y)
+            .count()
+    }
+}
+
+impl Display for LeafKey {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.bytes.fmt(f)
+    }
 }
 
 // INITIAL-MODIFICATION: We will use this type (instead of `Hash`) to allow for arbitrary key length
diff --git a/dan_layer/state_tree/src/key_mapper.rs b/dan_layer/state_tree/src/key_mapper.rs
index 88f574d75..71af284e5 100644
--- a/dan_layer/state_tree/src/key_mapper.rs
+++ b/dan_layer/state_tree/src/key_mapper.rs
@@ -1,7 +1,7 @@
 // Copyright 2024 The Tari Project
 // SPDX-License-Identifier: BSD-3-Clause
 
-use tari_engine_types::substate::SubstateId;
+use tari_dan_common_types::VersionedSubstateId;
 
 use crate::{jellyfish::LeafKey, Hash};
 
@@ -11,8 +11,8 @@ pub trait DbKeyMapper {
 
 pub struct SpreadPrefixKeyMapper;
 
-impl DbKeyMapper<SubstateId> for SpreadPrefixKeyMapper {
-    fn map_to_leaf_key(id: &SubstateId) -> LeafKey {
+impl DbKeyMapper<VersionedSubstateId> for SpreadPrefixKeyMapper {
+    fn map_to_leaf_key(id: &VersionedSubstateId) -> LeafKey {
         let hash = crate::jellyfish::jmt_node_hash(id);
         LeafKey::new(hash)
     }
diff --git a/dan_layer/state_tree/src/lib.rs b/dan_layer/state_tree/src/lib.rs
index 2e9d25432..ce3157a2c 100644
--- a/dan_layer/state_tree/src/lib.rs
+++ b/dan_layer/state_tree/src/lib.rs
@@ -12,5 +12,7 @@ pub mod memory_store;
 mod staged_store;
 pub use staged_store::*;
 
+mod bit_iter;
 mod tree;
+
 pub use tree::*;
diff --git a/dan_layer/state_tree/src/tree.rs b/dan_layer/state_tree/src/tree.rs
index 91d0464ee..92421b087 100644
--- a/dan_layer/state_tree/src/tree.rs
+++ b/dan_layer/state_tree/src/tree.rs
@@ -4,13 +4,14 @@
 use std::{iter::Peekable, marker::PhantomData};
 
 use serde::{Deserialize, Serialize};
-use tari_engine_types::substate::SubstateId;
+use tari_dan_common_types::VersionedSubstateId;
 
 use crate::{
     error::StateTreeError,
     jellyfish::{Hash, JellyfishMerkleTree, SparseMerkleProofExt, TreeStore, Version},
     key_mapper::{DbKeyMapper, HashIdentityKeyMapper, SpreadPrefixKeyMapper},
     memory_store::MemoryTreeStore,
+    LeafKey,
     Node,
     NodeKey,
     ProofValue,
@@ -37,16 +38,16 @@ impl<'a, S, M> StateTree<'a, S, M> {
     }
 }
 
-impl<'a, S: TreeStoreReader<Version>, M: DbKeyMapper<SubstateId>> StateTree<'a, S, M> {
+impl<'a, S: TreeStoreReader<Version>, M: DbKeyMapper<VersionedSubstateId>> StateTree<'a, S, M> {
     pub fn get_proof(
         &self,
         version: Version,
-        key: &SubstateId,
-    ) -> Result<(Option<ProofValue<Version>>, SparseMerkleProofExt), StateTreeError> {
+        key: &VersionedSubstateId,
+    ) -> Result<(LeafKey, Option<ProofValue<Version>>, SparseMerkleProofExt), StateTreeError> {
         let smt = JellyfishMerkleTree::new(self.store);
         let key = M::map_to_leaf_key(key);
         let (maybe_value, proof) = smt.get_with_proof_ext(key.as_ref(), version)?;
-        Ok((maybe_value, proof))
+        Ok((key, maybe_value, proof))
     }
 
     pub fn get_root_hash(&self, version: Version) -> Result<Hash, StateTreeError> {
@@ -56,7 +57,7 @@ impl<'a, S: TreeStoreReader, M: DbKeyMapper> StateTree<'a,
     }
 }
 
-impl<'a, S: TreeStore<Version>, M: DbKeyMapper<SubstateId>> StateTree<'a, S, M> {
+impl<'a, S: TreeStore<Version>, M: DbKeyMapper<VersionedSubstateId>> StateTree<'a, S, M> {
     fn calculate_substate_changes<I: IntoIterator<Item = SubstateTreeChange>>(
         &mut self,
         current_version: Option<Version>,
@@ -136,7 +137,7 @@ impl<'a, S: TreeStore<()>, M: DbKeyMapper> StateTree<'a, S, M> {
 /// Calculates the new root hash and tree updates for the given substate changes.
 fn calculate_substate_changes<
     S: TreeStoreReader<Version>,
-    M: DbKeyMapper<SubstateId>,
+    M: DbKeyMapper<VersionedSubstateId>,
     I: IntoIterator<Item = SubstateTreeChange>,
 >(
     store: &mut S,
@@ -157,12 +158,12 @@ fn calculate_substate_changes<
 }
 
 pub enum SubstateTreeChange {
-    Up { id: SubstateId, value_hash: Hash },
-    Down { id: SubstateId },
+    Up { id: VersionedSubstateId, value_hash: Hash },
+    Down { id: VersionedSubstateId },
 }
 
 impl SubstateTreeChange {
-    pub fn id(&self) -> &SubstateId {
+    pub fn id(&self) -> &VersionedSubstateId {
         match self {
             Self::Up { id, .. } => id,
             Self::Down { id } => id,
diff --git a/dan_layer/state_tree/tests/support.rs b/dan_layer/state_tree/tests/support.rs
index 88adf30d3..2fae20320 100644
--- a/dan_layer/state_tree/tests/support.rs
+++ b/dan_layer/state_tree/tests/support.rs
@@ -1,6 +1,7 @@
 // Copyright 2024 The Tari Project
 // SPDX-License-Identifier: BSD-3-Clause
 
+use tari_dan_common_types::VersionedSubstateId;
 use tari_engine_types::{hashing::substate_value_hasher32, substate::SubstateId};
 use tari_state_tree::{
     key_mapper::DbKeyMapper,
@@ -14,20 +15,26 @@ use tari_state_tree::{
 };
 use tari_template_lib::models::{ComponentAddress, ObjectKey};
 
-pub fn change(substate_id_seed: u8, value_seed: Option<u8>) -> SubstateTreeChange {
-    change_exact(
-        SubstateId::Component(ComponentAddress::new(ObjectKey::from_array(
-            [substate_id_seed; ObjectKey::LENGTH],
-        ))),
-        value_seed.map(from_seed),
+pub fn make_value(seed: u8) -> VersionedSubstateId {
+    VersionedSubstateId::new(
+        SubstateId::Component(ComponentAddress::new(ObjectKey::from_array([seed; ObjectKey::LENGTH]))),
+        u32::from(seed),
     )
 }
 
+pub fn change(substate_id_seed: u8, value_seed: Option<u8>) -> SubstateTreeChange {
+    change_exact(make_value(substate_id_seed), value_seed.map(from_seed))
+}
+
 fn hash_value(value: &[u8]) -> Hash {
     substate_value_hasher32().chain(value).result().into_array().into()
 }
 
-pub fn change_exact(substate_id: SubstateId, value: Option<Vec<u8>>) -> SubstateTreeChange {
+pub fn hash_value_from_seed(seed: u8) -> Hash {
+    hash_value(&from_seed(seed))
+}
+
+pub fn change_exact(substate_id: VersionedSubstateId, value: Option<Vec<u8>>) -> SubstateTreeChange {
     value
         .map(|value| SubstateTreeChange::Up {
             id: substate_id.clone(),
@@ -63,13 +70,17 @@ impl<S: TreeStore<Version>> HashTreeTester<S> {
         self.put_changes_at_version(current_version, next_version, changes)
     }
 
+    pub fn create_state_tree(&mut self) -> StateTree<'_, S, TestMapper> {
+        StateTree::<_, TestMapper>::new(&mut self.tree_store)
+    }
+
     pub fn put_changes_at_version(
         &mut self,
         current_version: Option<Version>,
         next_version: Version,
         changes: impl IntoIterator<Item = SubstateTreeChange>,
     ) -> Hash {
-        StateTree::<_, TestMapper>::new(&mut self.tree_store)
+        self.create_state_tree()
             .put_substate_changes(current_version, next_version, changes)
             .unwrap()
     }
@@ -83,8 +94,8 @@ impl HashTreeTester<MemoryTreeStore> {
 
 pub struct TestMapper;
 
-impl DbKeyMapper<SubstateId> for TestMapper {
-    fn map_to_leaf_key(id: &SubstateId) -> LeafKey {
+impl DbKeyMapper<VersionedSubstateId> for TestMapper {
+    fn map_to_leaf_key(id: &VersionedSubstateId) -> LeafKey {
         LeafKey::new(test_hasher32().chain(&id).result().into_array().into())
     }
 }
diff --git a/dan_layer/state_tree/tests/test.rs b/dan_layer/state_tree/tests/test.rs
index b48685064..4e6f1881a 100644
--- a/dan_layer/state_tree/tests/test.rs
+++ b/dan_layer/state_tree/tests/test.rs
@@ -7,7 +7,7 @@ use std::collections::HashSet;
 use itertools::Itertools;
 use tari_state_tree::{memory_store::MemoryTreeStore, StaleTreeNode, Version, SPARSE_MERKLE_PLACEHOLDER_HASH};
 
-use crate::support::{change, HashTreeTester};
+use crate::support::{change, hash_value_from_seed, make_value, HashTreeTester};
 
 mod support;
 
 #[test]
@@ -182,3 +182,53 @@ fn serialized_keys_are_strictly_increasing() {
     let max_previous_key = previous_keys.iter().max().unwrap();
     assert!(min_next_key > max_previous_key);
 }
+
+#[test]
+fn proofs() {
+    let mut tester = HashTreeTester::new_empty();
+    let root_v1 = tester.put_substate_changes(vec![change(1, Some(30))]);
+    tester.put_substate_changes(vec![change(2, Some(40))]);
+    let root_hash = tester.put_substate_changes(vec![change(3, Some(50))]);
+
+    let tree = tester.create_state_tree();
+    let (key, proof_value, proof) = tree.get_proof(3, &make_value(1)).unwrap();
+    let hash = hash_value_from_seed(30);
+    assert_eq!(proof_value, Some((hash, 1, 1)));
+    proof.verify_inclusion(&root_hash, &key, &hash).unwrap();
+    let (key, proof_value, proof) = tree.get_proof(3, &make_value(2)).unwrap();
+    let hash = hash_value_from_seed(40);
+    assert_eq!(proof_value, Some((hash, 2, 2)));
+    proof.verify_inclusion(&root_hash, &key, &hash).unwrap();
+    let (key, proof_value, proof) = tree.get_proof(3, &make_value(3)).unwrap();
+    let hash = hash_value_from_seed(50);
+    assert_eq!(proof_value, Some((hash, 3, 3)));
+    proof.verify_inclusion(&root_hash, &key, &hash).unwrap();
+    let (key, proof_value, proof) = tree.get_proof(3, &make_value(3)).unwrap();
+    proof
+        .verify_inclusion(&root_hash, &key, &proof_value.unwrap().0)
+        .unwrap();
+
+    // Fail cases:
+    // Fail to prove exclusion of an included value
+    let (key, _, proof) = tree.get_proof(3, &make_value(3)).unwrap();
+    proof.verify_exclusion(&root_hash, &key).unwrap_err();
+    // Fail to prove inclusion of an excluded value
+    let (key, _, proof) = tree.get_proof(3, &make_value(1)).unwrap();
+    let hash = hash_value_from_seed(50);
+    proof.verify_inclusion(&root_hash, &key, &hash).unwrap_err();
+    // Fail to prove inclusion against an old/incorrect merkle root
+    let (key, proof_value, proof) = tree.get_proof(3, &make_value(3)).unwrap();
+    proof
+        .verify_inclusion(&root_v1, &key, &proof_value.unwrap().0)
+        .unwrap_err();
+
+    // Exclusion proof
+    let (key, proof_value, proof) = tree.get_proof(3, &make_value(4)).unwrap();
+    assert!(proof_value.is_none());
+    proof.verify_exclusion(&root_hash, &key).unwrap();
+
+    // Fail to prove inclusion of a value that is not in the tree
+    let (key, _, proof) = tree.get_proof(3, &make_value(4)).unwrap();
+    let hash = hash_value_from_seed(50);
+    proof.verify_inclusion(&root_hash, &key, &hash).unwrap_err();
+}
diff --git a/dan_layer/storage/src/consensus_models/substate.rs b/dan_layer/storage/src/consensus_models/substate.rs
index 1094b0c2b..ab43668a2 100644
--- a/dan_layer/storage/src/consensus_models/substate.rs
+++ b/dan_layer/storage/src/consensus_models/substate.rs
@@ -344,6 +344,12 @@ pub struct SubstateDestroyedProof {
     pub destroyed_by_transaction: TransactionId,
 }
 
+impl SubstateDestroyedProof {
+    pub fn to_versioned_substate_id(&self) -> VersionedSubstateId {
+        VersionedSubstateId::new(self.substate_id.clone(), self.version)
+    }
+}
+
 #[derive(Debug, Clone)]
 pub struct SubstateData {
     pub substate_id: SubstateId,
@@ -353,6 +359,10 @@ pub struct SubstateData {
 }
 
 impl SubstateData {
+    pub fn to_versioned_substate_id(&self) -> VersionedSubstateId {
+        VersionedSubstateId::new(self.substate_id.clone(), self.version)
+    }
+
     pub fn into_substate(self) -> Substate {
         Substate::new(self.version, self.substate_value)
     }
diff --git a/dan_layer/storage/src/consensus_models/substate_change.rs b/dan_layer/storage/src/consensus_models/substate_change.rs
index d284da216..f07727993 100644
--- a/dan_layer/storage/src/consensus_models/substate_change.rs
+++ b/dan_layer/storage/src/consensus_models/substate_change.rs
@@ -146,12 +146,10 @@ impl From<&SubstateChange> for SubstateTreeChange {
     fn from(value: &SubstateChange) -> Self {
         match value {
             SubstateChange::Up { id, substate, .. } => SubstateTreeChange::Up {
-                id: id.substate_id().clone(),
+                id: id.clone(),
                 value_hash: substate.to_value_hash(),
             },
-            SubstateChange::Down { id, .. } => SubstateTreeChange::Down {
-                id: id.substate_id().clone(),
-            },
+            SubstateChange::Down { id, .. } => SubstateTreeChange::Down { id: id.clone() },
         }
     }
 }
diff --git a/integration_tests/tests/features/eviction.feature b/integration_tests/tests/features/eviction.feature
index 29d72b8cd..168cf6db5 100644
--- a/integration_tests/tests/features/eviction.feature
+++ b/integration_tests/tests/features/eviction.feature
@@ -6,7 +6,6 @@ Feature: Eviction scenarios
 
   @flaky
-  @doit
   Scenario: Offline validator gets evicted
     # Initialize a base node, wallet, miner and several VNs
     Given a base node BASE
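
Usage sketch for reviewers, separate from the patch above: it shows one way a caller might combine the (LeafKey, proof value, proof) triple now returned by StateTree::get_proof with the new verify_inclusion/verify_exclusion methods. It assumes that ProofValue<Version> is the (value_hash, payload, version) tuple seen in the test assertions, that Hash, LeafKey, SparseMerkleProofExt and Version are re-exported from tari_state_tree, and that anyhow is available; the function name verify_substate_proof is illustrative only.

use anyhow::Result;
use tari_state_tree::{Hash, LeafKey, SparseMerkleProofExt, Version};

/// Sketch only: checks a substate proof against a root hash obtained out-of-band (e.g. from a
/// committed block). `proof_value` is the optional `(value_hash, payload, version)` tuple that
/// `StateTree::get_proof` returns alongside the leaf key and the proof.
fn verify_substate_proof(
    expected_root_hash: &Hash,
    leaf_key: &LeafKey,
    proof_value: Option<(Hash, Version, Version)>,
    proof: &SparseMerkleProofExt,
) -> Result<()> {
    match proof_value {
        // The substate exists at the queried version: its value hash must fold with the sibling
        // hashes up to the expected root.
        Some((value_hash, _payload, _version)) => {
            proof.verify_inclusion(expected_root_hash, leaf_key, &value_hash)?;
        },
        // The substate is absent: the same proof doubles as a non-inclusion proof for the key.
        None => proof.verify_exclusion(expected_root_hash, leaf_key)?,
    }
    Ok(())
}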