Skip to content

Commit

Permalink
rename MerkleSetup to InMemoryMerkle (#564)
Browse files Browse the repository at this point in the history
  • Loading branch information
Dan Laine authored Feb 27, 2024
1 parent 11f2100 commit 6639e34
Show file tree
Hide file tree
Showing 4 changed files with 97 additions and 80 deletions.
3 changes: 2 additions & 1 deletion firewood/src/db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -392,7 +392,8 @@ impl<S: ShaleStore<Node> + Send + Sync> DbRev<S> {
values: Vec<V>,
) -> Result<bool, ProofError> {
let hash: [u8; 32] = *self.kv_root_hash()?;
let valid = proof.verify_range_proof(hash, first_key, last_key, keys, values)?;
let valid =
proof.verify_range_proof::<K, V, Bincode>(hash, first_key, last_key, keys, values)?;
Ok(valid)
}
}
Expand Down
66 changes: 41 additions & 25 deletions firewood/src/merkle/proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,10 @@ use crate::nibbles::NibblesIterator;
use crate::{
db::DbError,
merkle::{to_nibble_array, Merkle, MerkleError, Node, NodeType},
merkle_util::{new_merkle, DataStoreError, MerkleSetup},
merkle_util::{DataStoreError, InMemoryMerkle},
};

use super::{BinarySerde, NodeObjRef};
use super::{BinarySerde, EncodedNode, NodeObjRef};

#[derive(Debug, Error)]
pub enum ProofError {
Expand Down Expand Up @@ -146,14 +146,20 @@ impl<N: AsRef<[u8]> + Send> Proof<N> {
self.0.extend(other.0)
}

pub fn verify_range_proof<K: AsRef<[u8]>, V: AsRef<[u8]>>(
pub fn verify_range_proof<K, V, T>(
&self,
root_hash: HashKey,
first_key: K,
last_key: K,
keys: Vec<K>,
vals: Vec<V>,
) -> Result<bool, ProofError> {
) -> Result<bool, ProofError>
where
K: AsRef<[u8]>,
V: AsRef<[u8]>,
T: BinarySerde,
EncodedNode<T>: serde::Serialize + serde::de::DeserializeOwned,
{
if keys.len() != vals.len() {
return Err(ProofError::InconsistentProofData);
}
Expand All @@ -165,17 +171,17 @@ impl<N: AsRef<[u8]> + Send> Proof<N> {
}

// Use in-memory merkle
let mut merkle_setup = new_merkle(0x10000, 0x10000);
let mut in_mem_merkle = InMemoryMerkle::new(0x10000, 0x10000);

// Special case, there is no edge proof at all. The given range is expected
// to be the whole leaf-set in the trie.
if self.0.is_empty() {
for (index, k) in keys.iter().enumerate() {
#[allow(clippy::indexing_slicing)]
merkle_setup.insert(k, vals[index].as_ref().to_vec())?;
in_mem_merkle.insert(k, vals[index].as_ref().to_vec())?;
}

let merkle_root = &*merkle_setup.root_hash()?;
let merkle_root = &*in_mem_merkle.root_hash()?;

return if merkle_root == &root_hash {
Ok(false)
Expand All @@ -188,7 +194,7 @@ impl<N: AsRef<[u8]> + Send> Proof<N> {
// ensure there are no more accounts / slots in the trie.
if keys.is_empty() {
let proof_to_path =
self.proof_to_path(first_key, root_hash, &mut merkle_setup, true)?;
self.proof_to_path(first_key, root_hash, &mut in_mem_merkle, true)?;
return match proof_to_path {
Some(_) => Err(ProofError::InvalidData),
None => Ok(false),
Expand All @@ -199,7 +205,7 @@ impl<N: AsRef<[u8]> + Send> Proof<N> {
// In this case, we can't construct two edge paths. So handle it here.
if keys.len() == 1 && first_key.as_ref() == last_key.as_ref() {
let data =
self.proof_to_path(first_key.as_ref(), root_hash, &mut merkle_setup, false)?;
self.proof_to_path(first_key.as_ref(), root_hash, &mut in_mem_merkle, false)?;

#[allow(clippy::indexing_slicing)]
return if first_key.as_ref() != keys[0].as_ref() {
Expand All @@ -224,29 +230,29 @@ impl<N: AsRef<[u8]> + Send> Proof<N> {
// Convert the edge proofs to edge trie paths. Then we can
// have the same tree architecture with the original one.
// For the first edge proof, non-existent proof is allowed.
self.proof_to_path(first_key.as_ref(), root_hash, &mut merkle_setup, true)?;
self.proof_to_path(first_key.as_ref(), root_hash, &mut in_mem_merkle, true)?;

// Pass the root node here, the second path will be merged
// with the first one. For the last edge proof, non-existent
// proof is also allowed.
self.proof_to_path(last_key.as_ref(), root_hash, &mut merkle_setup, true)?;
self.proof_to_path(last_key.as_ref(), root_hash, &mut in_mem_merkle, true)?;

// Remove all internally calculated values. All the removed parts should
// be re-filled (or re-constructed) by the given range of leaves.
let fork_at_root =
unset_internal(&mut merkle_setup, first_key.as_ref(), last_key.as_ref())?;
unset_internal(&mut in_mem_merkle, first_key.as_ref(), last_key.as_ref())?;

// If the fork point is the root, the trie should be empty, start with a new one.
if fork_at_root {
merkle_setup = new_merkle(0x100000, 0x100000);
in_mem_merkle = InMemoryMerkle::new(0x100000, 0x100000);
}

for (key, val) in keys.iter().zip(vals.iter()) {
merkle_setup.insert(key.as_ref(), val.as_ref().to_vec())?;
in_mem_merkle.insert(key.as_ref(), val.as_ref().to_vec())?;
}

// Calculate the hash
let merkle_root = &*merkle_setup.root_hash()?;
let merkle_root = &*in_mem_merkle.root_hash()?;

if merkle_root == &root_hash {
Ok(true)
Expand All @@ -260,16 +266,21 @@ impl<N: AsRef<[u8]> + Send> Proof<N> {
/// necessary nodes will be resolved and leave the remaining as hashnode.
///
/// The given edge proof is allowed to be an existent or non-existent proof.
fn proof_to_path<K: AsRef<[u8]>, S: ShaleStore<Node> + Send + Sync, T: BinarySerde>(
fn proof_to_path<K, T>(
&self,
key: K,
root_hash: HashKey,
merkle_setup: &mut MerkleSetup<S, T>,
in_mem_merkle: &mut InMemoryMerkle<T>,
allow_non_existent_node: bool,
) -> Result<Option<Vec<u8>>, ProofError> {
) -> Result<Option<Vec<u8>>, ProofError>
where
K: AsRef<[u8]>,
T: BinarySerde,
EncodedNode<T>: serde::Serialize + serde::de::DeserializeOwned,
{
// Start with the sentinel root
let sentinel = merkle_setup.get_sentinel_address();
let merkle = merkle_setup.get_merkle_mut();
let sentinel = in_mem_merkle.get_sentinel_address();
let merkle = in_mem_merkle.get_merkle_mut();
let mut parent_node_ref = merkle
.get_node(sentinel)
.map_err(|_| ProofError::NoSuchNode)?;
Expand Down Expand Up @@ -472,19 +483,24 @@ fn generate_subproof(encoded: &[u8]) -> Result<SubProof, ProofError> {
//
// The return value indicates if the fork point is root node. If so, unset the
// entire trie.
fn unset_internal<K: AsRef<[u8]>, S: ShaleStore<Node> + Send + Sync, T: BinarySerde>(
merkle_setup: &mut MerkleSetup<S, T>,
fn unset_internal<K, T>(
in_mem_merkle: &mut InMemoryMerkle<T>,
left: K,
right: K,
) -> Result<bool, ProofError> {
) -> Result<bool, ProofError>
where
K: AsRef<[u8]>,
T: BinarySerde,
EncodedNode<T>: serde::Serialize + serde::de::DeserializeOwned,
{
// Add the sentinel root
let mut left_chunks = vec![0];
left_chunks.extend(left.as_ref().iter().copied().flat_map(to_nibble_array));
// Add the sentinel root
let mut right_chunks = vec![0];
right_chunks.extend(right.as_ref().iter().copied().flat_map(to_nibble_array));
let root = merkle_setup.get_sentinel_address();
let merkle = merkle_setup.get_merkle_mut();
let root = in_mem_merkle.get_sentinel_address();
let merkle = in_mem_merkle.get_merkle_mut();
let mut u_ref = merkle.get_node(root).map_err(|_| ProofError::NoSuchNode)?;
let mut parent = DiskAddress::null();

Expand Down
92 changes: 48 additions & 44 deletions firewood/src/merkle_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,11 @@
use crate::{
merkle::{
proof::{Proof, ProofError},
BinarySerde, Bincode, Merkle, Node, Ref, RefMut, TrieHash,
BinarySerde, EncodedNode, Merkle, Node, Ref, RefMut, TrieHash,
},
shale::{
self, cached::DynamicMem, compact::CompactSpace, disk_address::DiskAddress, CachedStore,
ShaleStore, StoredView,
StoredView,
},
};
use std::num::NonZeroUsize;
Expand Down Expand Up @@ -36,12 +36,53 @@ pub enum DataStoreError {
ProofEmptyKeyValuesError,
}

pub struct MerkleSetup<S, T> {
type InMemoryStore = CompactSpace<Node, DynamicMem>;

pub struct InMemoryMerkle<T> {
root: DiskAddress,
merkle: Merkle<S, T>,
merkle: Merkle<InMemoryStore, T>,
}

impl<S: ShaleStore<Node> + Send + Sync, T: BinarySerde> MerkleSetup<S, T> {
impl<T> InMemoryMerkle<T>
where
T: BinarySerde,
EncodedNode<T>: serde::Serialize + serde::de::DeserializeOwned,
{
/// Creates an `InMemoryMerkle` backed by two in-memory (`DynamicMem`)
/// stores: one of `meta_size` bytes for metadata and one of
/// `compact_size` bytes for the payload.
///
/// # Panics
/// Both sizes must be strictly larger than the 0x1000-byte reserved
/// header region, otherwise the `assert!`s below panic.
pub fn new(meta_size: u64, compact_size: u64) -> Self {
// Bytes reserved at the start of each store for bookkeeping headers.
const RESERVED: usize = 0x1000;
assert!(meta_size as usize > RESERVED);
assert!(compact_size as usize > RESERVED);
let mut dm = DynamicMem::new(meta_size, 0);
// The compact-space header lives at offset 0 of the meta store.
let compact_header = DiskAddress::null();
#[allow(clippy::unwrap_used)]
dm.write(
compact_header.into(),
&shale::to_dehydrated(&shale::compact::CompactSpaceHeader::new(
NonZeroUsize::new(RESERVED).unwrap(),
#[allow(clippy::unwrap_used)]
NonZeroUsize::new(RESERVED).unwrap(),
))
.unwrap(),
);
// Re-read the header just written as a typed object view; this must
// happen after the `dm.write` above so the bytes are valid.
#[allow(clippy::unwrap_used)]
let compact_header =
StoredView::ptr_to_obj(&dm, compact_header, shale::compact::CompactHeader::MSIZE)
.unwrap();
let mem_meta = dm;
let mem_payload = DynamicMem::new(compact_size, 0x1);

// NOTE(review): cache size 1 and the (10, 16) parameters look tuned
// for in-memory/test use — confirm against `CompactSpace::new` docs.
let cache = shale::ObjCache::new(1);
let space =
shale::compact::CompactSpace::new(mem_meta, mem_payload, compact_header, cache, 10, 16)
.expect("CompactSpace init fail");

let merkle = Merkle::new(Box::new(space));
#[allow(clippy::unwrap_used)]
let root = merkle.init_root().unwrap();

InMemoryMerkle { root, merkle }
}

pub fn insert<K: AsRef<[u8]>>(&mut self, key: K, val: Vec<u8>) -> Result<(), DataStoreError> {
self.merkle
.insert(key, val, self.root)
Expand All @@ -63,7 +104,7 @@ impl<S: ShaleStore<Node> + Send + Sync, T: BinarySerde> MerkleSetup<S, T> {
pub fn get_mut<K: AsRef<[u8]>>(
&mut self,
key: K,
) -> Result<Option<RefMut<S, T>>, DataStoreError> {
) -> Result<Option<RefMut<InMemoryStore, T>>, DataStoreError> {
self.merkle
.get_mut(key, self.root)
.map_err(|_err| DataStoreError::GetError)
Expand All @@ -73,7 +114,7 @@ impl<S: ShaleStore<Node> + Send + Sync, T: BinarySerde> MerkleSetup<S, T> {
self.root
}

pub fn get_merkle_mut(&mut self) -> &mut Merkle<S, T> {
pub fn get_merkle_mut(&mut self) -> &mut Merkle<InMemoryStore, T> {
&mut self.merkle
}

Expand Down Expand Up @@ -120,40 +161,3 @@ impl<S: ShaleStore<Node> + Send + Sync, T: BinarySerde> MerkleSetup<S, T> {
proof.verify_range_proof(hash, first_key, last_key, keys, vals)
}
}

/// Builds a `MerkleSetup` backed entirely by in-memory (`DynamicMem`)
/// stores, using `Bincode` for node serialization.
///
/// `meta_size` / `compact_size` are the byte sizes of the metadata and
/// payload stores.
///
/// # Panics
/// Both sizes must exceed the 0x1000-byte reserved header region or the
/// `assert!`s below panic.
pub fn new_merkle(
meta_size: u64,
compact_size: u64,
) -> MerkleSetup<CompactSpace<Node, DynamicMem>, Bincode> {
// Bytes reserved at the start of each store for bookkeeping headers.
const RESERVED: usize = 0x1000;
assert!(meta_size as usize > RESERVED);
assert!(compact_size as usize > RESERVED);
let mut dm = DynamicMem::new(meta_size, 0);
// The compact-space header lives at offset 0 of the meta store.
let compact_header = DiskAddress::null();
#[allow(clippy::unwrap_used)]
dm.write(
compact_header.into(),
&shale::to_dehydrated(&shale::compact::CompactSpaceHeader::new(
NonZeroUsize::new(RESERVED).unwrap(),
#[allow(clippy::unwrap_used)]
NonZeroUsize::new(RESERVED).unwrap(),
))
.unwrap(),
);
// Re-read the header just written as a typed object view; this must
// happen after the `dm.write` above so the bytes are valid.
#[allow(clippy::unwrap_used)]
let compact_header =
StoredView::ptr_to_obj(&dm, compact_header, shale::compact::CompactHeader::MSIZE).unwrap();
let mem_meta = dm;
let mem_payload = DynamicMem::new(compact_size, 0x1);

// NOTE(review): cache size 1 and the (10, 16) parameters look tuned
// for in-memory/test use — confirm against `CompactSpace::new` docs.
let cache = shale::ObjCache::new(1);
let space =
shale::compact::CompactSpace::new(mem_meta, mem_payload, compact_header, cache, 10, 16)
.expect("CompactSpace init fail");

let merkle = Merkle::new(Box::new(space));
#[allow(clippy::unwrap_used)]
let root = merkle.init_root().unwrap();

MerkleSetup { root, merkle }
}
16 changes: 6 additions & 10 deletions firewood/tests/merkle.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,25 +2,21 @@
// See the file LICENSE.md for licensing terms.

use firewood::{
merkle::{Bincode, Node, Proof, ProofError},
merkle_util::{new_merkle, DataStoreError, MerkleSetup},
// TODO: we should not be using shale from an integration test
shale::{cached::DynamicMem, compact::CompactSpace},
merkle::{Bincode, Proof, ProofError},
merkle_util::{DataStoreError, InMemoryMerkle},
};
use rand::Rng;
use std::{collections::HashMap, fmt::Write};

type Store = CompactSpace<Node, DynamicMem>;

fn merkle_build_test<
K: AsRef<[u8]> + std::cmp::Ord + Clone + std::fmt::Debug,
V: AsRef<[u8]> + Clone,
>(
items: Vec<(K, V)>,
meta_size: u64,
compact_size: u64,
) -> Result<MerkleSetup<Store, Bincode>, DataStoreError> {
let mut merkle = new_merkle(meta_size, compact_size);
) -> Result<InMemoryMerkle<Bincode>, DataStoreError> {
let mut merkle = InMemoryMerkle::new(meta_size, compact_size);
for (k, v) in items.iter() {
merkle.insert(k, v.as_ref().to_vec())?;
}
Expand Down Expand Up @@ -113,7 +109,7 @@ fn test_root_hash_reversed_deletions() -> Result<(), DataStoreError> {

items.sort();

let mut merkle = new_merkle(0x100000, 0x100000);
let mut merkle: InMemoryMerkle<Bincode> = InMemoryMerkle::new(0x100000, 0x100000);

let mut hashes = Vec::new();

Expand Down Expand Up @@ -182,7 +178,7 @@ fn test_root_hash_random_deletions() -> Result<(), DataStoreError> {
let mut items_ordered: Vec<_> = items.iter().map(|(k, v)| (k.clone(), v.clone())).collect();
items_ordered.sort();
items_ordered.shuffle(&mut *rng.borrow_mut());
let mut merkle = new_merkle(0x100000, 0x100000);
let mut merkle: InMemoryMerkle<Bincode> = InMemoryMerkle::new(0x100000, 0x100000);

for (k, v) in items.iter() {
merkle.insert(k, v.to_vec())?;
Expand Down

0 comments on commit 6639e34

Please sign in to comment.